Source file
src/reflect/value.go
1 // Copyright 2009 The Go Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style
3 // license that can be found in the LICENSE file.
4
5 package reflect
6
7 import (
8 "errors"
9 "internal/abi"
10 "internal/goarch"
11 "internal/itoa"
12 "internal/unsafeheader"
13 "math"
14 "runtime"
15 "unsafe"
16 )
17
18 // Value is the reflection interface to a Go value.
19 //
20 // Not all methods apply to all kinds of values. Restrictions,
21 // if any, are noted in the documentation for each method.
22 //
23 // Use the Kind method to find out the kind of value before
24 // calling kind-specific methods. Calling a method
25 // inappropriate to the kind of type causes a run time panic.
26 //
27 // The zero Value represents no value.
28 // Its IsValid method returns false, its Kind method returns Invalid,
29 // its String method returns "<invalid Value>", and all other methods panic.
30 // Most functions and methods never return an invalid value.
31 // If one does, its documentation states the conditions explicitly.
32 //
33 // A Value can be used concurrently by multiple goroutines provided that
34 // the underlying Go value can be used concurrently for the equivalent
35 // direct operations.
36 //
37 // To compare two Values, compare the results of the Interface method.
38 // Using == on two Values does not compare the underlying values they represent.
39 type Value struct {
40 // typ holds the type of the value represented by a Value.
41 typ *rtype
42
43 // Pointer-valued data or, if flagIndir is set, pointer to data.
44 // Valid when either flagIndir is set or typ.pointers() is true.
45 ptr unsafe.Pointer
46
47 // flag holds metadata about the value.
48 //
49 // The lowest five bits give the Kind of the value, mirroring typ.Kind().
50 //
51 // The next set of bits are flag bits:
52 //	- flagStickyRO: obtained via unexported not embedded field, so read-only
53 //	- flagEmbedRO: obtained via unexported embedded field, so read-only
54 //	- flagIndir: val holds a pointer to the data
55 //	- flagAddr: v.CanAddr is true (implies flagIndir and ptr is non-nil)
56 //	- flagMethod: v is a method value.
57 // If ifaceIndir(typ), code can assume that flagIndir and ptr are non-nil.
58 //
59 // The remaining 22+ bits give a method number for method values.
60 // If flag.kind() != Func, code can assume that flagMethod is unset.
61 flag
62
63 // A method value represents a curried method invocation
64 // like r.Read for some receiver r. The typ+val+flag bits describe
65 // the receiver r, but the flag's Kind bits say Func (methods are
66 // functions), and the top bits of the flag give the method number
67 // in r's type's method table.
68 }
69
70 type flag uintptr
71
72 const (
73 flagKindWidth = 5
74 flagKindMask flag = 1<<flagKindWidth - 1
75 flagStickyRO flag = 1 << 5
76 flagEmbedRO flag = 1 << 6
77 flagIndir flag = 1 << 7
78 flagAddr flag = 1 << 8
79 flagMethod flag = 1 << 9
80 flagMethodShift = 10
81 flagRO flag = flagStickyRO | flagEmbedRO
82 )
83
84 func (f flag) kind() Kind {
85 return Kind(f & flagKindMask)
86 }
87
88 func (f flag) ro() flag {
89 if f&flagRO != 0 {
90 return flagStickyRO
91 }
92 return 0
93 }
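// Editorial sketch (not part of the original source): how the kind bits and the
// read-only flags above surface through the exported API. The type and variable
// names are illustrative only.
//
//	type T struct{ Exported, unexported int }
//	v := reflect.ValueOf(T{1, 2})
//	_ = v.Field(0).Kind()         // reflect.Int; the low flag bits hold the Kind
//	_ = v.Field(0).CanInterface() // true
//	_ = v.Field(1).CanInterface() // false: flagStickyRO marks the unexported field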
94
95
96
97
98 func (v Value) pointer() unsafe.Pointer {
99 if v.typ.size != goarch.PtrSize || !v.typ.pointers() {
100 panic("can't call pointer on a non-pointer Value")
101 }
102 if v.flag&flagIndir != 0 {
103 return *(*unsafe.Pointer)(v.ptr)
104 }
105 return v.ptr
106 }
107
108
109 func packEface(v Value) any {
110 t := v.typ
111 var i any
112 e := (*emptyInterface)(unsafe.Pointer(&i))
113
114 switch {
115 case ifaceIndir(t):
116 if v.flag&flagIndir == 0 {
117 panic("bad indir")
118 }
119
120 ptr := v.ptr
121 if v.flag&flagAddr != 0 {
122
123
124 c := unsafe_New(t)
125 typedmemmove(t, c, ptr)
126 ptr = c
127 }
128 e.word = ptr
129 case v.flag&flagIndir != 0:
130
131
132 e.word = *(*unsafe.Pointer)(v.ptr)
133 default:
134
135 e.word = v.ptr
136 }
137
138
139
140
141 e.typ = t
142 return i
143 }
144
145
146 func unpackEface(i any) Value {
147 e := (*emptyInterface)(unsafe.Pointer(&i))
148
149 t := e.typ
150 if t == nil {
151 return Value{}
152 }
153 f := flag(t.Kind())
154 if ifaceIndir(t) {
155 f |= flagIndir
156 }
157 return Value{t, e.word, f}
158 }
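// Editorial sketch (not part of the original source): packEface and unpackEface
// are, roughly, the internal halves of Value.Interface and ValueOf. Seen from
// the public API, the round trip looks like this:
//
//	x := []int{1, 2, 3}
//	v := reflect.ValueOf(x) // any -> Value (unpackEface)
//	y := v.Interface()      // Value -> any (packEface)
//	_ = y.([]int)           // same dynamic type and value as x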
159
160
161
162
163 type ValueError struct {
164 Method string
165 Kind Kind
166 }
167
168 func (e *ValueError) Error() string {
169 if e.Kind == 0 {
170 return "reflect: call of " + e.Method + " on zero Value"
171 }
172 return "reflect: call of " + e.Method + " on " + e.Kind.String() + " Value"
173 }
174
175
176 func valueMethodName() string {
177 var pc [5]uintptr
178 n := runtime.Callers(1, pc[:])
179 frames := runtime.CallersFrames(pc[:n])
180 var frame runtime.Frame
181 for more := true; more; {
182 const prefix = "reflect.Value."
183 frame, more = frames.Next()
184 name := frame.Function
185 if len(name) > len(prefix) && name[:len(prefix)] == prefix {
186 methodName := name[len(prefix):]
187 if len(methodName) > 0 && 'A' <= methodName[0] && methodName[0] <= 'Z' {
188 return name
189 }
190 }
191 }
192 return "unknown method"
193 }
194
195
196 type emptyInterface struct {
197 typ *rtype
198 word unsafe.Pointer
199 }
200
201
202 type nonEmptyInterface struct {
203
204 itab *struct {
205 ityp *rtype
206 typ *rtype
207 hash uint32
208 _ [4]byte
209 fun [100000]unsafe.Pointer
210 }
211 word unsafe.Pointer
212 }
213
214
215
216
217
218
219
220 func (f flag) mustBe(expected Kind) {
221
222 if Kind(f&flagKindMask) != expected {
223 panic(&ValueError{valueMethodName(), f.kind()})
224 }
225 }
226
227
228
229 func (f flag) mustBeExported() {
230 if f == 0 || f&flagRO != 0 {
231 f.mustBeExportedSlow()
232 }
233 }
234
235 func (f flag) mustBeExportedSlow() {
236 if f == 0 {
237 panic(&ValueError{valueMethodName(), Invalid})
238 }
239 if f&flagRO != 0 {
240 panic("reflect: " + valueMethodName() + " using value obtained using unexported field")
241 }
242 }
243
244
245
246
247 func (f flag) mustBeAssignable() {
248 if f&flagRO != 0 || f&flagAddr == 0 {
249 f.mustBeAssignableSlow()
250 }
251 }
252
253 func (f flag) mustBeAssignableSlow() {
254 if f == 0 {
255 panic(&ValueError{valueMethodName(), Invalid})
256 }
257
258 if f&flagRO != 0 {
259 panic("reflect: " + valueMethodName() + " using value obtained using unexported field")
260 }
261 if f&flagAddr == 0 {
262 panic("reflect: " + valueMethodName() + " using unaddressable value")
263 }
264 }
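// Editorial sketch (not part of the original source): the two failure modes
// checked by mustBeAssignable correspond to the usual CanSet results.
// Variable names are illustrative.
//
//	var s struct {
//		N      int
//		hidden int
//	}
//	_ = reflect.ValueOf(s).Field(0).CanSet()         // false: a copy, not addressable
//	_ = reflect.ValueOf(&s).Elem().Field(0).CanSet() // true
//	_ = reflect.ValueOf(&s).Elem().Field(1).CanSet() // false: unexported, read-only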
265
266
267
268
269
270
271 func (v Value) Addr() Value {
272 if v.flag&flagAddr == 0 {
273 panic("reflect.Value.Addr of unaddressable value")
274 }
275
276
277 fl := v.flag & flagRO
278 return Value{v.typ.ptrTo(), v.ptr, fl | flag(Pointer)}
279 }
280
281
282
283 func (v Value) Bool() bool {
284
285 if v.kind() != Bool {
286 v.panicNotBool()
287 }
288 return *(*bool)(v.ptr)
289 }
290
291 func (v Value) panicNotBool() {
292 v.mustBe(Bool)
293 }
294
295 var bytesType = rtypeOf(([]byte)(nil))
296
297
298
299
300 func (v Value) Bytes() []byte {
301
302 if v.typ == bytesType {
303 return *(*[]byte)(v.ptr)
304 }
305 return v.bytesSlow()
306 }
307
308 func (v Value) bytesSlow() []byte {
309 switch v.kind() {
310 case Slice:
311 if v.typ.Elem().Kind() != Uint8 {
312 panic("reflect.Value.Bytes of non-byte slice")
313 }
314
315 return *(*[]byte)(v.ptr)
316 case Array:
317 if v.typ.Elem().Kind() != Uint8 {
318 panic("reflect.Value.Bytes of non-byte array")
319 }
320 if !v.CanAddr() {
321 panic("reflect.Value.Bytes of unaddressable byte array")
322 }
323 p := (*byte)(v.ptr)
324 n := int((*arrayType)(unsafe.Pointer(v.typ)).len)
325 return unsafe.Slice(p, n)
326 }
327 panic(&ValueError{"reflect.Value.Bytes", v.kind()})
328 }
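// Editorial sketch (not part of the original source): the slice and array cases
// accepted by Bytes, using illustrative values.
//
//	b := []byte("hi")
//	_ = reflect.ValueOf(b).Bytes()         // ok: byte slice
//	var a [2]byte
//	_ = reflect.ValueOf(&a).Elem().Bytes() // ok: addressable byte array
//	_ = reflect.ValueOf(a).Bytes()         // panics: unaddressable byte array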
329
330
331
332 func (v Value) runes() []rune {
333 v.mustBe(Slice)
334 if v.typ.Elem().Kind() != Int32 {
335 panic("reflect.Value.Bytes of non-rune slice")
336 }
337
338 return *(*[]rune)(v.ptr)
339 }
340
341
342
343
344
345
346 func (v Value) CanAddr() bool {
347 return v.flag&flagAddr != 0
348 }
349
350
351
352
353
354
355 func (v Value) CanSet() bool {
356 return v.flag&(flagAddr|flagRO) == flagAddr
357 }
358
359
360
361
362
363
364
365
366
367 func (v Value) Call(in []Value) []Value {
368 v.mustBe(Func)
369 v.mustBeExported()
370 return v.call("Call", in)
371 }
372
373
374
375
376
377
378
379
380 func (v Value) CallSlice(in []Value) []Value {
381 v.mustBe(Func)
382 v.mustBeExported()
383 return v.call("CallSlice", in)
384 }
385
386 var callGC bool
387
388 const debugReflectCall = false
389
390 func (v Value) call(op string, in []Value) []Value {
391
392 t := (*funcType)(unsafe.Pointer(v.typ))
393 var (
394 fn unsafe.Pointer
395 rcvr Value
396 rcvrtype *rtype
397 )
398 if v.flag&flagMethod != 0 {
399 rcvr = v
400 rcvrtype, t, fn = methodReceiver(op, v, int(v.flag)>>flagMethodShift)
401 } else if v.flag&flagIndir != 0 {
402 fn = *(*unsafe.Pointer)(v.ptr)
403 } else {
404 fn = v.ptr
405 }
406
407 if fn == nil {
408 panic("reflect.Value.Call: call of nil function")
409 }
410
411 isSlice := op == "CallSlice"
412 n := t.NumIn()
413 isVariadic := t.IsVariadic()
414 if isSlice {
415 if !isVariadic {
416 panic("reflect: CallSlice of non-variadic function")
417 }
418 if len(in) < n {
419 panic("reflect: CallSlice with too few input arguments")
420 }
421 if len(in) > n {
422 panic("reflect: CallSlice with too many input arguments")
423 }
424 } else {
425 if isVariadic {
426 n--
427 }
428 if len(in) < n {
429 panic("reflect: Call with too few input arguments")
430 }
431 if !isVariadic && len(in) > n {
432 panic("reflect: Call with too many input arguments")
433 }
434 }
435 for _, x := range in {
436 if x.Kind() == Invalid {
437 panic("reflect: " + op + " using zero Value argument")
438 }
439 }
440 for i := 0; i < n; i++ {
441 if xt, targ := in[i].Type(), t.In(i); !xt.AssignableTo(targ) {
442 panic("reflect: " + op + " using " + xt.String() + " as type " + targ.String())
443 }
444 }
445 if !isSlice && isVariadic {
446
447 m := len(in) - n
448 slice := MakeSlice(t.In(n), m, m)
449 elem := t.In(n).Elem()
450 for i := 0; i < m; i++ {
451 x := in[n+i]
452 if xt := x.Type(); !xt.AssignableTo(elem) {
453 panic("reflect: cannot use " + xt.String() + " as type " + elem.String() + " in " + op)
454 }
455 slice.Index(i).Set(x)
456 }
457 origIn := in
458 in = make([]Value, n+1)
459 copy(in[:n], origIn)
460 in[n] = slice
461 }
462
463 nin := len(in)
464 if nin != t.NumIn() {
465 panic("reflect.Value.Call: wrong argument count")
466 }
467 nout := t.NumOut()
468
469
470 var regArgs abi.RegArgs
471
472
473 frametype, framePool, abid := funcLayout(t, rcvrtype)
474
475
476 var stackArgs unsafe.Pointer
477 if frametype.size != 0 {
478 if nout == 0 {
479 stackArgs = framePool.Get().(unsafe.Pointer)
480 } else {
481
482
483 stackArgs = unsafe_New(frametype)
484 }
485 }
486 frameSize := frametype.size
487
488 if debugReflectCall {
489 println("reflect.call", t.String())
490 abid.dump()
491 }
492
493
494
495
496 inStart := 0
497 if rcvrtype != nil {
498
499
500
501 switch st := abid.call.steps[0]; st.kind {
502 case abiStepStack:
503 storeRcvr(rcvr, stackArgs)
504 case abiStepPointer:
505 storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ptrs[st.ireg]))
506 fallthrough
507 case abiStepIntReg:
508 storeRcvr(rcvr, unsafe.Pointer(&regArgs.Ints[st.ireg]))
509 case abiStepFloatReg:
510 storeRcvr(rcvr, unsafe.Pointer(&regArgs.Floats[st.freg]))
511 default:
512 panic("unknown ABI parameter kind")
513 }
514 inStart = 1
515 }
516
517
518 for i, v := range in {
519 v.mustBeExported()
520 targ := t.In(i).(*rtype)
521
522
523
524 v = v.assignTo("reflect.Value.Call", targ, nil)
525 stepsLoop:
526 for _, st := range abid.call.stepsForValue(i + inStart) {
527 switch st.kind {
528 case abiStepStack:
529
530 addr := add(stackArgs, st.stkOff, "precomputed stack arg offset")
531 if v.flag&flagIndir != 0 {
532 typedmemmove(targ, addr, v.ptr)
533 } else {
534 *(*unsafe.Pointer)(addr) = v.ptr
535 }
536
537 break stepsLoop
538 case abiStepIntReg, abiStepPointer:
539
540 if v.flag&flagIndir != 0 {
541 offset := add(v.ptr, st.offset, "precomputed value offset")
542 if st.kind == abiStepPointer {
543
544
545
546 regArgs.Ptrs[st.ireg] = *(*unsafe.Pointer)(offset)
547 }
548 intToReg(&regArgs, st.ireg, st.size, offset)
549 } else {
550 if st.kind == abiStepPointer {
551
552 regArgs.Ptrs[st.ireg] = v.ptr
553 }
554 regArgs.Ints[st.ireg] = uintptr(v.ptr)
555 }
556 case abiStepFloatReg:
557
558 if v.flag&flagIndir == 0 {
559 panic("attempted to copy pointer to FP register")
560 }
561 offset := add(v.ptr, st.offset, "precomputed value offset")
562 floatToReg(&regArgs, st.freg, st.size, offset)
563 default:
564 panic("unknown ABI part kind")
565 }
566 }
567 }
568
569
570 frameSize = align(frameSize, goarch.PtrSize)
571 frameSize += abid.spill
572
573
574 regArgs.ReturnIsPtr = abid.outRegPtrs
575
576 if debugReflectCall {
577 regArgs.Dump()
578 }
579
580
581 if callGC {
582 runtime.GC()
583 }
584
585
586 call(frametype, fn, stackArgs, uint32(frametype.size), uint32(abid.retOffset), uint32(frameSize), &regArgs)
587
588
589 if callGC {
590 runtime.GC()
591 }
592
593 var ret []Value
594 if nout == 0 {
595 if stackArgs != nil {
596 typedmemclr(frametype, stackArgs)
597 framePool.Put(stackArgs)
598 }
599 } else {
600 if stackArgs != nil {
601
602
603
604 typedmemclrpartial(frametype, stackArgs, 0, abid.retOffset)
605 }
606
607
608 ret = make([]Value, nout)
609 for i := 0; i < nout; i++ {
610 tv := t.Out(i)
611 if tv.Size() == 0 {
612
613
614 ret[i] = Zero(tv)
615 continue
616 }
617 steps := abid.ret.stepsForValue(i)
618 if st := steps[0]; st.kind == abiStepStack {
619
620
621
622 fl := flagIndir | flag(tv.Kind())
623 ret[i] = Value{tv.common(), add(stackArgs, st.stkOff, "tv.Size() != 0"), fl}
624
625
626
627
628 continue
629 }
630
631
632 if !ifaceIndir(tv.common()) {
633
634
635 if steps[0].kind != abiStepPointer {
636 print("kind=", steps[0].kind, ", type=", tv.String(), "\n")
637 panic("mismatch between ABI description and types")
638 }
639 ret[i] = Value{tv.common(), regArgs.Ptrs[steps[0].ireg], flag(tv.Kind())}
640 continue
641 }
642
643
644
645
646
647
648
649
650
651
652 s := unsafe_New(tv.common())
653 for _, st := range steps {
654 switch st.kind {
655 case abiStepIntReg:
656 offset := add(s, st.offset, "precomputed value offset")
657 intFromReg(&regArgs, st.ireg, st.size, offset)
658 case abiStepPointer:
659 s := add(s, st.offset, "precomputed value offset")
660 *((*unsafe.Pointer)(s)) = regArgs.Ptrs[st.ireg]
661 case abiStepFloatReg:
662 offset := add(s, st.offset, "precomputed value offset")
663 floatFromReg(&regArgs, st.freg, st.size, offset)
664 case abiStepStack:
665 panic("register-based return value has stack component")
666 default:
667 panic("unknown ABI part kind")
668 }
669 }
670 ret[i] = Value{tv.common(), s, flagIndir | flag(tv.Kind())}
671 }
672 }
673
674 return ret
675 }
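// Editorial sketch (not part of the original source): Call and CallSlice on a
// variadic function, as seen by a caller of this package. The function literal
// is illustrative.
//
//	sum := func(xs ...int) int {
//		t := 0
//		for _, x := range xs {
//			t += x
//		}
//		return t
//	}
//	fv := reflect.ValueOf(sum)
//	_ = fv.Call([]reflect.Value{reflect.ValueOf(1), reflect.ValueOf(2)})[0].Int() // 3
//	_ = fv.CallSlice([]reflect.Value{reflect.ValueOf([]int{1, 2, 3})})[0].Int()   // 6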
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697 func callReflect(ctxt *makeFuncImpl, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
698 if callGC {
699
700
701
702
703
704 runtime.GC()
705 }
706 ftyp := ctxt.ftyp
707 f := ctxt.fn
708
709 _, _, abid := funcLayout(ftyp, nil)
710
711
712 ptr := frame
713 in := make([]Value, 0, int(ftyp.inCount))
714 for i, typ := range ftyp.in() {
715 if typ.Size() == 0 {
716 in = append(in, Zero(typ))
717 continue
718 }
719 v := Value{typ, nil, flag(typ.Kind())}
720 steps := abid.call.stepsForValue(i)
721 if st := steps[0]; st.kind == abiStepStack {
722 if ifaceIndir(typ) {
723
724
725
726
727 v.ptr = unsafe_New(typ)
728 if typ.size > 0 {
729 typedmemmove(typ, v.ptr, add(ptr, st.stkOff, "typ.size > 0"))
730 }
731 v.flag |= flagIndir
732 } else {
733 v.ptr = *(*unsafe.Pointer)(add(ptr, st.stkOff, "1-ptr"))
734 }
735 } else {
736 if ifaceIndir(typ) {
737
738
739 v.flag |= flagIndir
740 v.ptr = unsafe_New(typ)
741 for _, st := range steps {
742 switch st.kind {
743 case abiStepIntReg:
744 offset := add(v.ptr, st.offset, "precomputed value offset")
745 intFromReg(regs, st.ireg, st.size, offset)
746 case abiStepPointer:
747 s := add(v.ptr, st.offset, "precomputed value offset")
748 *((*unsafe.Pointer)(s)) = regs.Ptrs[st.ireg]
749 case abiStepFloatReg:
750 offset := add(v.ptr, st.offset, "precomputed value offset")
751 floatFromReg(regs, st.freg, st.size, offset)
752 case abiStepStack:
753 panic("register-based return value has stack component")
754 default:
755 panic("unknown ABI part kind")
756 }
757 }
758 } else {
759
760
761 if steps[0].kind != abiStepPointer {
762 print("kind=", steps[0].kind, ", type=", typ.String(), "\n")
763 panic("mismatch between ABI description and types")
764 }
765 v.ptr = regs.Ptrs[steps[0].ireg]
766 }
767 }
768 in = append(in, v)
769 }
770
771
772 out := f(in)
773 numOut := ftyp.NumOut()
774 if len(out) != numOut {
775 panic("reflect: wrong return count from function created by MakeFunc")
776 }
777
778
779 if numOut > 0 {
780 for i, typ := range ftyp.out() {
781 v := out[i]
782 if v.typ == nil {
783 panic("reflect: function created by MakeFunc using " + funcName(f) +
784 " returned zero Value")
785 }
786 if v.flag&flagRO != 0 {
787 panic("reflect: function created by MakeFunc using " + funcName(f) +
788 " returned value obtained from unexported field")
789 }
790 if typ.size == 0 {
791 continue
792 }
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808 v = v.assignTo("reflect.MakeFunc", typ, nil)
809 stepsLoop:
810 for _, st := range abid.ret.stepsForValue(i) {
811 switch st.kind {
812 case abiStepStack:
813
814 addr := add(ptr, st.stkOff, "precomputed stack arg offset")
815
816
817
818
819 if v.flag&flagIndir != 0 {
820 memmove(addr, v.ptr, st.size)
821 } else {
822
823 *(*uintptr)(addr) = uintptr(v.ptr)
824 }
825
826 break stepsLoop
827 case abiStepIntReg, abiStepPointer:
828
829 if v.flag&flagIndir != 0 {
830 offset := add(v.ptr, st.offset, "precomputed value offset")
831 intToReg(regs, st.ireg, st.size, offset)
832 } else {
833
834
835
836
837
838 regs.Ints[st.ireg] = uintptr(v.ptr)
839 }
840 case abiStepFloatReg:
841
842 if v.flag&flagIndir == 0 {
843 panic("attempted to copy pointer to FP register")
844 }
845 offset := add(v.ptr, st.offset, "precomputed value offset")
846 floatToReg(regs, st.freg, st.size, offset)
847 default:
848 panic("unknown ABI part kind")
849 }
850 }
851 }
852 }
853
854
855
856 *retValid = true
857
858
859
860
861
862 runtime.KeepAlive(out)
863
864
865
866
867 runtime.KeepAlive(ctxt)
868 }
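// Editorial sketch (not part of the original source): callReflect is the
// trampoline behind functions created by MakeFunc. The public entry point it
// serves looks like this; names are illustrative.
//
//	var negate func(int) int
//	fn := reflect.MakeFunc(reflect.TypeOf(negate), func(in []reflect.Value) []reflect.Value {
//		return []reflect.Value{reflect.ValueOf(-int(in[0].Int()))}
//	})
//	reflect.ValueOf(&negate).Elem().Set(fn)
//	_ = negate(3) // -3; the call is dispatched through callReflect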
869
870
871
872
873
874
875
876
877 func methodReceiver(op string, v Value, methodIndex int) (rcvrtype *rtype, t *funcType, fn unsafe.Pointer) {
878 i := methodIndex
879 if v.typ.Kind() == Interface {
880 tt := (*interfaceType)(unsafe.Pointer(v.typ))
881 if uint(i) >= uint(len(tt.methods)) {
882 panic("reflect: internal error: invalid method index")
883 }
884 m := &tt.methods[i]
885 if !tt.nameOff(m.name).isExported() {
886 panic("reflect: " + op + " of unexported method")
887 }
888 iface := (*nonEmptyInterface)(v.ptr)
889 if iface.itab == nil {
890 panic("reflect: " + op + " of method on nil interface value")
891 }
892 rcvrtype = iface.itab.typ
893 fn = unsafe.Pointer(&iface.itab.fun[i])
894 t = (*funcType)(unsafe.Pointer(tt.typeOff(m.typ)))
895 } else {
896 rcvrtype = v.typ
897 ms := v.typ.exportedMethods()
898 if uint(i) >= uint(len(ms)) {
899 panic("reflect: internal error: invalid method index")
900 }
901 m := ms[i]
902 if !v.typ.nameOff(m.name).isExported() {
903 panic("reflect: " + op + " of unexported method")
904 }
905 ifn := v.typ.textOff(m.ifn)
906 fn = unsafe.Pointer(&ifn)
907 t = (*funcType)(unsafe.Pointer(v.typ.typeOff(m.mtyp)))
908 }
909 return
910 }
911
912
913
914
915
916 func storeRcvr(v Value, p unsafe.Pointer) {
917 t := v.typ
918 if t.Kind() == Interface {
919
920 iface := (*nonEmptyInterface)(v.ptr)
921 *(*unsafe.Pointer)(p) = iface.word
922 } else if v.flag&flagIndir != 0 && !ifaceIndir(t) {
923 *(*unsafe.Pointer)(p) = *(*unsafe.Pointer)(v.ptr)
924 } else {
925 *(*unsafe.Pointer)(p) = v.ptr
926 }
927 }
928
929
930
931 func align(x, n uintptr) uintptr {
932 return (x + n - 1) &^ (n - 1)
933 }
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954 func callMethod(ctxt *methodValue, frame unsafe.Pointer, retValid *bool, regs *abi.RegArgs) {
955 rcvr := ctxt.rcvr
956 rcvrType, valueFuncType, methodFn := methodReceiver("call", rcvr, ctxt.method)
957
958
959
960
961
962
963
964
965
966 _, _, valueABI := funcLayout(valueFuncType, nil)
967 valueFrame, valueRegs := frame, regs
968 methodFrameType, methodFramePool, methodABI := funcLayout(valueFuncType, rcvrType)
969
970
971
972 methodFrame := methodFramePool.Get().(unsafe.Pointer)
973 var methodRegs abi.RegArgs
974
975
976 switch st := methodABI.call.steps[0]; st.kind {
977 case abiStepStack:
978
979
980 storeRcvr(rcvr, methodFrame)
981 case abiStepPointer:
982
983 storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ptrs[st.ireg]))
984 fallthrough
985 case abiStepIntReg:
986 storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Ints[st.ireg]))
987 case abiStepFloatReg:
988 storeRcvr(rcvr, unsafe.Pointer(&methodRegs.Floats[st.freg]))
989 default:
990 panic("unknown ABI parameter kind")
991 }
992
993
994 for i, t := range valueFuncType.in() {
995 valueSteps := valueABI.call.stepsForValue(i)
996 methodSteps := methodABI.call.stepsForValue(i + 1)
997
998
999 if len(valueSteps) == 0 {
1000 if len(methodSteps) != 0 {
1001 panic("method ABI and value ABI do not align")
1002 }
1003 continue
1004 }
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016 if vStep := valueSteps[0]; vStep.kind == abiStepStack {
1017 mStep := methodSteps[0]
1018
1019 if mStep.kind == abiStepStack {
1020 if vStep.size != mStep.size {
1021 panic("method ABI and value ABI do not align")
1022 }
1023 typedmemmove(t,
1024 add(methodFrame, mStep.stkOff, "precomputed stack offset"),
1025 add(valueFrame, vStep.stkOff, "precomputed stack offset"))
1026 continue
1027 }
1028
1029 for _, mStep := range methodSteps {
1030 from := add(valueFrame, vStep.stkOff+mStep.offset, "precomputed stack offset")
1031 switch mStep.kind {
1032 case abiStepPointer:
1033
1034 methodRegs.Ptrs[mStep.ireg] = *(*unsafe.Pointer)(from)
1035 fallthrough
1036 case abiStepIntReg:
1037 intToReg(&methodRegs, mStep.ireg, mStep.size, from)
1038 case abiStepFloatReg:
1039 floatToReg(&methodRegs, mStep.freg, mStep.size, from)
1040 default:
1041 panic("unexpected method step")
1042 }
1043 }
1044 continue
1045 }
1046
1047 if mStep := methodSteps[0]; mStep.kind == abiStepStack {
1048 for _, vStep := range valueSteps {
1049 to := add(methodFrame, mStep.stkOff+vStep.offset, "precomputed stack offset")
1050 switch vStep.kind {
1051 case abiStepPointer:
1052
1053 *(*unsafe.Pointer)(to) = valueRegs.Ptrs[vStep.ireg]
1054 case abiStepIntReg:
1055 intFromReg(valueRegs, vStep.ireg, vStep.size, to)
1056 case abiStepFloatReg:
1057 floatFromReg(valueRegs, vStep.freg, vStep.size, to)
1058 default:
1059 panic("unexpected value step")
1060 }
1061 }
1062 continue
1063 }
1064
1065 if len(valueSteps) != len(methodSteps) {
1066
1067
1068
1069 panic("method ABI and value ABI don't align")
1070 }
1071 for i, vStep := range valueSteps {
1072 mStep := methodSteps[i]
1073 if mStep.kind != vStep.kind {
1074 panic("method ABI and value ABI don't align")
1075 }
1076 switch vStep.kind {
1077 case abiStepPointer:
1078
1079 methodRegs.Ptrs[mStep.ireg] = valueRegs.Ptrs[vStep.ireg]
1080 fallthrough
1081 case abiStepIntReg:
1082 methodRegs.Ints[mStep.ireg] = valueRegs.Ints[vStep.ireg]
1083 case abiStepFloatReg:
1084 methodRegs.Floats[mStep.freg] = valueRegs.Floats[vStep.freg]
1085 default:
1086 panic("unexpected value step")
1087 }
1088 }
1089 }
1090
1091 methodFrameSize := methodFrameType.size
1092
1093
1094 methodFrameSize = align(methodFrameSize, goarch.PtrSize)
1095 methodFrameSize += methodABI.spill
1096
1097
1098 methodRegs.ReturnIsPtr = methodABI.outRegPtrs
1099
1100
1101
1102
1103 call(methodFrameType, methodFn, methodFrame, uint32(methodFrameType.size), uint32(methodABI.retOffset), uint32(methodFrameSize), &methodRegs)
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114 if valueRegs != nil {
1115 *valueRegs = methodRegs
1116 }
1117 if retSize := methodFrameType.size - methodABI.retOffset; retSize > 0 {
1118 valueRet := add(valueFrame, valueABI.retOffset, "valueFrame's size > retOffset")
1119 methodRet := add(methodFrame, methodABI.retOffset, "methodFrame's size > retOffset")
1120
1121 memmove(valueRet, methodRet, retSize)
1122 }
1123
1124
1125
1126 *retValid = true
1127
1128
1129
1130
1131 typedmemclr(methodFrameType, methodFrame)
1132 methodFramePool.Put(methodFrame)
1133
1134
1135 runtime.KeepAlive(ctxt)
1136
1137
1138
1139
1140 runtime.KeepAlive(valueRegs)
1141 }
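// Editorial sketch (not part of the original source): callMethod backs func
// values produced from method Values via makeMethodValue, for example when
// calling Interface on a Value returned by Method or MethodByName. Assumes the
// standard strings package; names are illustrative.
//
//	var b strings.Builder
//	mv := reflect.ValueOf(&b).MethodByName("WriteString")
//	f := mv.Interface().(func(string) (int, error))
//	f("hi")        // dispatched through callMethod
//	_ = b.String() // "hi"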
1142
1143
1144 func funcName(f func([]Value) []Value) string {
1145 pc := *(*uintptr)(unsafe.Pointer(&f))
1146 rf := runtime.FuncForPC(pc)
1147 if rf != nil {
1148 return rf.Name()
1149 }
1150 return "closure"
1151 }
1152
1153
1154
1155 func (v Value) Cap() int {
1156
1157 if v.kind() == Slice {
1158 return (*unsafeheader.Slice)(v.ptr).Cap
1159 }
1160 return v.capNonSlice()
1161 }
1162
1163 func (v Value) capNonSlice() int {
1164 k := v.kind()
1165 switch k {
1166 case Array:
1167 return v.typ.Len()
1168 case Chan:
1169 return chancap(v.pointer())
1170 case Ptr:
1171 if v.typ.Elem().Kind() == Array {
1172 return v.typ.Elem().Len()
1173 }
1174 panic("reflect: call of reflect.Value.Cap on ptr to non-array Value")
1175 }
1176 panic(&ValueError{"reflect.Value.Cap", v.kind()})
1177 }
1178
1179
1180
1181 func (v Value) Close() {
1182 v.mustBe(Chan)
1183 v.mustBeExported()
1184 chanclose(v.pointer())
1185 }
1186
1187
1188 func (v Value) CanComplex() bool {
1189 switch v.kind() {
1190 case Complex64, Complex128:
1191 return true
1192 default:
1193 return false
1194 }
1195 }
1196
1197
1198
1199 func (v Value) Complex() complex128 {
1200 k := v.kind()
1201 switch k {
1202 case Complex64:
1203 return complex128(*(*complex64)(v.ptr))
1204 case Complex128:
1205 return *(*complex128)(v.ptr)
1206 }
1207 panic(&ValueError{"reflect.Value.Complex", v.kind()})
1208 }
1209
1210
1211
1212
1213
1214 func (v Value) Elem() Value {
1215 k := v.kind()
1216 switch k {
1217 case Interface:
1218 var eface any
1219 if v.typ.NumMethod() == 0 {
1220 eface = *(*any)(v.ptr)
1221 } else {
1222 eface = (any)(*(*interface {
1223 M()
1224 })(v.ptr))
1225 }
1226 x := unpackEface(eface)
1227 if x.flag != 0 {
1228 x.flag |= v.flag.ro()
1229 }
1230 return x
1231 case Pointer:
1232 ptr := v.ptr
1233 if v.flag&flagIndir != 0 {
1234 if ifaceIndir(v.typ) {
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245 if !verifyNotInHeapPtr(*(*uintptr)(ptr)) {
1246 panic("reflect: reflect.Value.Elem on an invalid notinheap pointer")
1247 }
1248 }
1249 ptr = *(*unsafe.Pointer)(ptr)
1250 }
1251
1252 if ptr == nil {
1253 return Value{}
1254 }
1255 tt := (*ptrType)(unsafe.Pointer(v.typ))
1256 typ := tt.elem
1257 fl := v.flag&flagRO | flagIndir | flagAddr
1258 fl |= flag(typ.Kind())
1259 return Value{typ, ptr, fl}
1260 }
1261 panic(&ValueError{"reflect.Value.Elem", v.kind()})
1262 }
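// Editorial sketch (not part of the original source): Elem on a pointer and on
// an interface, with illustrative values.
//
//	x := 10
//	_ = reflect.ValueOf(&x).Elem().Int() // 10; the result is addressable
//
//	var i any = "s"
//	_ = reflect.ValueOf(&i).Elem().Elem().Kind() // reflect.String: contents of the interface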
1263
1264
1265
1266 func (v Value) Field(i int) Value {
1267 if v.kind() != Struct {
1268 panic(&ValueError{"reflect.Value.Field", v.kind()})
1269 }
1270 tt := (*structType)(unsafe.Pointer(v.typ))
1271 if uint(i) >= uint(len(tt.fields)) {
1272 panic("reflect: Field index out of range")
1273 }
1274 field := &tt.fields[i]
1275 typ := field.typ
1276
1277
1278 fl := v.flag&(flagStickyRO|flagIndir|flagAddr) | flag(typ.Kind())
1279
1280 if !field.name.isExported() {
1281 if field.embedded() {
1282 fl |= flagEmbedRO
1283 } else {
1284 fl |= flagStickyRO
1285 }
1286 }
1287
1288
1289
1290
1291
1292 ptr := add(v.ptr, field.offset, "same as non-reflect &v.field")
1293 return Value{typ, ptr, fl}
1294 }
1295
1296
1297
1298
1299 func (v Value) FieldByIndex(index []int) Value {
1300 if len(index) == 1 {
1301 return v.Field(index[0])
1302 }
1303 v.mustBe(Struct)
1304 for i, x := range index {
1305 if i > 0 {
1306 if v.Kind() == Pointer && v.typ.Elem().Kind() == Struct {
1307 if v.IsNil() {
1308 panic("reflect: indirection through nil pointer to embedded struct")
1309 }
1310 v = v.Elem()
1311 }
1312 }
1313 v = v.Field(x)
1314 }
1315 return v
1316 }
1317
1318
1319
1320
1321
1322 func (v Value) FieldByIndexErr(index []int) (Value, error) {
1323 if len(index) == 1 {
1324 return v.Field(index[0]), nil
1325 }
1326 v.mustBe(Struct)
1327 for i, x := range index {
1328 if i > 0 {
1329 if v.Kind() == Ptr && v.typ.Elem().Kind() == Struct {
1330 if v.IsNil() {
1331 return Value{}, errors.New("reflect: indirection through nil pointer to embedded struct field " + v.typ.Elem().Name())
1332 }
1333 v = v.Elem()
1334 }
1335 }
1336 v = v.Field(x)
1337 }
1338 return v, nil
1339 }
1340
1341
1342
1343
1344 func (v Value) FieldByName(name string) Value {
1345 v.mustBe(Struct)
1346 if f, ok := v.typ.FieldByName(name); ok {
1347 return v.FieldByIndex(f.Index)
1348 }
1349 return Value{}
1350 }
1351
1352
1353
1354
1355
1356 func (v Value) FieldByNameFunc(match func(string) bool) Value {
1357 if f, ok := v.typ.FieldByNameFunc(match); ok {
1358 return v.FieldByIndex(f.Index)
1359 }
1360 return Value{}
1361 }
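// Editorial sketch (not part of the original source): field access by index and
// by name. The struct type is illustrative.
//
//	type user struct {
//		Name string
//		age  int
//	}
//	v := reflect.ValueOf(user{Name: "ada", age: 36})
//	_ = v.FieldByName("Name").String() // "ada"
//	_ = v.FieldByIndex([]int{1}).Int() // 36, but CanInterface reports false (unexported)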
1362
1363
1364 func (v Value) CanFloat() bool {
1365 switch v.kind() {
1366 case Float32, Float64:
1367 return true
1368 default:
1369 return false
1370 }
1371 }
1372
1373
1374
1375 func (v Value) Float() float64 {
1376 k := v.kind()
1377 switch k {
1378 case Float32:
1379 return float64(*(*float32)(v.ptr))
1380 case Float64:
1381 return *(*float64)(v.ptr)
1382 }
1383 panic(&ValueError{"reflect.Value.Float", v.kind()})
1384 }
1385
1386 var uint8Type = rtypeOf(uint8(0))
1387
1388
1389
1390 func (v Value) Index(i int) Value {
1391 switch v.kind() {
1392 case Array:
1393 tt := (*arrayType)(unsafe.Pointer(v.typ))
1394 if uint(i) >= uint(tt.len) {
1395 panic("reflect: array index out of range")
1396 }
1397 typ := tt.elem
1398 offset := uintptr(i) * typ.size
1399
1400
1401
1402
1403
1404
1405 val := add(v.ptr, offset, "same as &v[i], i < tt.len")
1406 fl := v.flag&(flagIndir|flagAddr) | v.flag.ro() | flag(typ.Kind())
1407 return Value{typ, val, fl}
1408
1409 case Slice:
1410
1411
1412 s := (*unsafeheader.Slice)(v.ptr)
1413 if uint(i) >= uint(s.Len) {
1414 panic("reflect: slice index out of range")
1415 }
1416 tt := (*sliceType)(unsafe.Pointer(v.typ))
1417 typ := tt.elem
1418 val := arrayAt(s.Data, i, typ.size, "i < s.Len")
1419 fl := flagAddr | flagIndir | v.flag.ro() | flag(typ.Kind())
1420 return Value{typ, val, fl}
1421
1422 case String:
1423 s := (*unsafeheader.String)(v.ptr)
1424 if uint(i) >= uint(s.Len) {
1425 panic("reflect: string index out of range")
1426 }
1427 p := arrayAt(s.Data, i, 1, "i < s.Len")
1428 fl := v.flag.ro() | flag(Uint8) | flagIndir
1429 return Value{uint8Type, p, fl}
1430 }
1431 panic(&ValueError{"reflect.Value.Index", v.kind()})
1432 }
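// Editorial sketch (not part of the original source): Index on a slice and on a
// string, with illustrative values.
//
//	s := []string{"a", "b"}
//	_ = reflect.ValueOf(s).Index(1).String()  // "b"; slice elements are addressable
//	_ = reflect.ValueOf("go").Index(0).Uint() // 0x67 ('g'); string bytes are read-only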
1433
1434
1435 func (v Value) CanInt() bool {
1436 switch v.kind() {
1437 case Int, Int8, Int16, Int32, Int64:
1438 return true
1439 default:
1440 return false
1441 }
1442 }
1443
1444
1445
1446 func (v Value) Int() int64 {
1447 k := v.kind()
1448 p := v.ptr
1449 switch k {
1450 case Int:
1451 return int64(*(*int)(p))
1452 case Int8:
1453 return int64(*(*int8)(p))
1454 case Int16:
1455 return int64(*(*int16)(p))
1456 case Int32:
1457 return int64(*(*int32)(p))
1458 case Int64:
1459 return *(*int64)(p)
1460 }
1461 panic(&ValueError{"reflect.Value.Int", v.kind()})
1462 }
1463
1464
1465 func (v Value) CanInterface() bool {
1466 if v.flag == 0 {
1467 panic(&ValueError{"reflect.Value.CanInterface", Invalid})
1468 }
1469 return v.flag&flagRO == 0
1470 }
1471
1472
1473
1474
1475
1476
1477
1478
1479 func (v Value) Interface() (i any) {
1480 return valueInterface(v, true)
1481 }
1482
1483 func valueInterface(v Value, safe bool) any {
1484 if v.flag == 0 {
1485 panic(&ValueError{"reflect.Value.Interface", Invalid})
1486 }
1487 if safe && v.flag&flagRO != 0 {
1488
1489
1490
1491 panic("reflect.Value.Interface: cannot return value obtained from unexported field or method")
1492 }
1493 if v.flag&flagMethod != 0 {
1494 v = makeMethodValue("Interface", v)
1495 }
1496
1497 if v.kind() == Interface {
1498
1499
1500
1501 if v.NumMethod() == 0 {
1502 return *(*any)(v.ptr)
1503 }
1504 return *(*interface {
1505 M()
1506 })(v.ptr)
1507 }
1508
1509
1510 return packEface(v)
1511 }
1512
1513
1514
1515
1516
1517
1518
1519
1520
1521
1522 func (v Value) InterfaceData() [2]uintptr {
1523 v.mustBe(Interface)
1524
1525
1526
1527
1528
1529 return *(*[2]uintptr)(v.ptr)
1530 }
1531
1532
1533
1534
1535
1536
1537
1538
1539 func (v Value) IsNil() bool {
1540 k := v.kind()
1541 switch k {
1542 case Chan, Func, Map, Pointer, UnsafePointer:
1543 if v.flag&flagMethod != 0 {
1544 return false
1545 }
1546 ptr := v.ptr
1547 if v.flag&flagIndir != 0 {
1548 ptr = *(*unsafe.Pointer)(ptr)
1549 }
1550 return ptr == nil
1551 case Interface, Slice:
1552
1553
1554 return *(*unsafe.Pointer)(v.ptr) == nil
1555 }
1556 panic(&ValueError{"reflect.Value.IsNil", v.kind()})
1557 }
1558
1559
1560
1561
1562
1563
1564 func (v Value) IsValid() bool {
1565 return v.flag != 0
1566 }
1567
1568
1569
1570 func (v Value) IsZero() bool {
1571 switch v.kind() {
1572 case Bool:
1573 return !v.Bool()
1574 case Int, Int8, Int16, Int32, Int64:
1575 return v.Int() == 0
1576 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
1577 return v.Uint() == 0
1578 case Float32, Float64:
1579 return math.Float64bits(v.Float()) == 0
1580 case Complex64, Complex128:
1581 c := v.Complex()
1582 return math.Float64bits(real(c)) == 0 && math.Float64bits(imag(c)) == 0
1583 case Array:
1584
1585 if v.typ.equal != nil && v.typ.size <= maxZero {
1586 if v.flag&flagIndir == 0 {
1587 return v.ptr == nil
1588 }
1589 return v.typ.equal(v.ptr, unsafe.Pointer(&zeroVal[0]))
1590 }
1591
1592 n := v.Len()
1593 for i := 0; i < n; i++ {
1594 if !v.Index(i).IsZero() {
1595 return false
1596 }
1597 }
1598 return true
1599 case Chan, Func, Interface, Map, Pointer, Slice, UnsafePointer:
1600 return v.IsNil()
1601 case String:
1602 return v.Len() == 0
1603 case Struct:
1604
1605 if v.typ.equal != nil && v.typ.size <= maxZero {
1606 if v.flag&flagIndir == 0 {
1607 return v.ptr == nil
1608 }
1609 return v.typ.equal(v.ptr, unsafe.Pointer(&zeroVal[0]))
1610 }
1611
1612 n := v.NumField()
1613 for i := 0; i < n; i++ {
1614 if !v.Field(i).IsZero() {
1615 return false
1616 }
1617 }
1618 return true
1619 default:
1620
1621
1622 panic(&ValueError{"reflect.Value.IsZero", v.Kind()})
1623 }
1624 }
1625
1626
1627
1628 func (v Value) SetZero() {
1629 v.mustBeAssignable()
1630 switch v.kind() {
1631 case Bool:
1632 *(*bool)(v.ptr) = false
1633 case Int:
1634 *(*int)(v.ptr) = 0
1635 case Int8:
1636 *(*int8)(v.ptr) = 0
1637 case Int16:
1638 *(*int16)(v.ptr) = 0
1639 case Int32:
1640 *(*int32)(v.ptr) = 0
1641 case Int64:
1642 *(*int64)(v.ptr) = 0
1643 case Uint:
1644 *(*uint)(v.ptr) = 0
1645 case Uint8:
1646 *(*uint8)(v.ptr) = 0
1647 case Uint16:
1648 *(*uint16)(v.ptr) = 0
1649 case Uint32:
1650 *(*uint32)(v.ptr) = 0
1651 case Uint64:
1652 *(*uint64)(v.ptr) = 0
1653 case Uintptr:
1654 *(*uintptr)(v.ptr) = 0
1655 case Float32:
1656 *(*float32)(v.ptr) = 0
1657 case Float64:
1658 *(*float64)(v.ptr) = 0
1659 case Complex64:
1660 *(*complex64)(v.ptr) = 0
1661 case Complex128:
1662 *(*complex128)(v.ptr) = 0
1663 case String:
1664 *(*string)(v.ptr) = ""
1665 case Slice:
1666 *(*unsafeheader.Slice)(v.ptr) = unsafeheader.Slice{}
1667 case Interface:
1668 *(*[2]unsafe.Pointer)(v.ptr) = [2]unsafe.Pointer{}
1669 case Chan, Func, Map, Pointer, UnsafePointer:
1670 *(*unsafe.Pointer)(v.ptr) = nil
1671 case Array, Struct:
1672 typedmemclr(v.typ, v.ptr)
1673 default:
1674
1675
1676 panic(&ValueError{"reflect.Value.SetZero", v.Kind()})
1677 }
1678 }
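// Editorial sketch (not part of the original source): IsZero and SetZero on an
// addressable struct value; the type is illustrative.
//
//	type point struct{ X, Y int }
//	v := reflect.ValueOf(&point{1, 2}).Elem()
//	_ = v.IsZero() // false
//	v.SetZero()    // like v.Set(reflect.Zero(v.Type())), clearing in place
//	_ = v.IsZero() // true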
1679
1680
1681
1682 func (v Value) Kind() Kind {
1683 return v.kind()
1684 }
1685
1686
1687
1688 func (v Value) Len() int {
1689
1690 if v.kind() == Slice {
1691 return (*unsafeheader.Slice)(v.ptr).Len
1692 }
1693 return v.lenNonSlice()
1694 }
1695
1696 func (v Value) lenNonSlice() int {
1697 switch k := v.kind(); k {
1698 case Array:
1699 tt := (*arrayType)(unsafe.Pointer(v.typ))
1700 return int(tt.len)
1701 case Chan:
1702 return chanlen(v.pointer())
1703 case Map:
1704 return maplen(v.pointer())
1705 case String:
1706
1707 return (*unsafeheader.String)(v.ptr).Len
1708 case Ptr:
1709 if v.typ.Elem().Kind() == Array {
1710 return v.typ.Elem().Len()
1711 }
1712 panic("reflect: call of reflect.Value.Len on ptr to non-array Value")
1713 }
1714 panic(&ValueError{"reflect.Value.Len", v.kind()})
1715 }
1716
1717 var stringType = rtypeOf("")
1718
1719
1720
1721
1722
1723 func (v Value) MapIndex(key Value) Value {
1724 v.mustBe(Map)
1725 tt := (*mapType)(unsafe.Pointer(v.typ))
1726
1727
1728
1729
1730
1731
1732
1733
1734
1735 var e unsafe.Pointer
1736 if (tt.key == stringType || key.kind() == String) && tt.key == key.typ && tt.elem.size <= maxValSize {
1737 k := *(*string)(key.ptr)
1738 e = mapaccess_faststr(v.typ, v.pointer(), k)
1739 } else {
1740 key = key.assignTo("reflect.Value.MapIndex", tt.key, nil)
1741 var k unsafe.Pointer
1742 if key.flag&flagIndir != 0 {
1743 k = key.ptr
1744 } else {
1745 k = unsafe.Pointer(&key.ptr)
1746 }
1747 e = mapaccess(v.typ, v.pointer(), k)
1748 }
1749 if e == nil {
1750 return Value{}
1751 }
1752 typ := tt.elem
1753 fl := (v.flag | key.flag).ro()
1754 fl |= flag(typ.Kind())
1755 return copyVal(typ, fl, e)
1756 }
1757
1758
1759
1760
1761
1762 func (v Value) MapKeys() []Value {
1763 v.mustBe(Map)
1764 tt := (*mapType)(unsafe.Pointer(v.typ))
1765 keyType := tt.key
1766
1767 fl := v.flag.ro() | flag(keyType.Kind())
1768
1769 m := v.pointer()
1770 mlen := int(0)
1771 if m != nil {
1772 mlen = maplen(m)
1773 }
1774 var it hiter
1775 mapiterinit(v.typ, m, &it)
1776 a := make([]Value, mlen)
1777 var i int
1778 for i = 0; i < len(a); i++ {
1779 key := mapiterkey(&it)
1780 if key == nil {
1781
1782
1783
1784 break
1785 }
1786 a[i] = copyVal(keyType, fl, key)
1787 mapiternext(&it)
1788 }
1789 return a[:i]
1790 }
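// Editorial sketch (not part of the original source): MapIndex and MapKeys on a
// small map, with illustrative values.
//
//	m := map[string]int{"a": 1}
//	v := reflect.ValueOf(m)
//	_ = v.MapIndex(reflect.ValueOf("a")).Int() // 1
//	_ = v.MapIndex(reflect.ValueOf("b"))       // zero Value: key is absent
//	_ = v.MapKeys()                            // one key, in unspecified order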
1791
1792
1793
1794
1795
1796 type hiter struct {
1797 key unsafe.Pointer
1798 elem unsafe.Pointer
1799 t unsafe.Pointer
1800 h unsafe.Pointer
1801 buckets unsafe.Pointer
1802 bptr unsafe.Pointer
1803 overflow *[]unsafe.Pointer
1804 oldoverflow *[]unsafe.Pointer
1805 startBucket uintptr
1806 offset uint8
1807 wrapped bool
1808 B uint8
1809 i uint8
1810 bucket uintptr
1811 checkBucket uintptr
1812 }
1813
1814 func (h *hiter) initialized() bool {
1815 return h.t != nil
1816 }
1817
1818
1819
1820 type MapIter struct {
1821 m Value
1822 hiter hiter
1823 }
1824
1825
1826 func (iter *MapIter) Key() Value {
1827 if !iter.hiter.initialized() {
1828 panic("MapIter.Key called before Next")
1829 }
1830 iterkey := mapiterkey(&iter.hiter)
1831 if iterkey == nil {
1832 panic("MapIter.Key called on exhausted iterator")
1833 }
1834
1835 t := (*mapType)(unsafe.Pointer(iter.m.typ))
1836 ktype := t.key
1837 return copyVal(ktype, iter.m.flag.ro()|flag(ktype.Kind()), iterkey)
1838 }
1839
1840
1841
1842
1843
1844 func (v Value) SetIterKey(iter *MapIter) {
1845 if !iter.hiter.initialized() {
1846 panic("reflect: Value.SetIterKey called before Next")
1847 }
1848 iterkey := mapiterkey(&iter.hiter)
1849 if iterkey == nil {
1850 panic("reflect: Value.SetIterKey called on exhausted iterator")
1851 }
1852
1853 v.mustBeAssignable()
1854 var target unsafe.Pointer
1855 if v.kind() == Interface {
1856 target = v.ptr
1857 }
1858
1859 t := (*mapType)(unsafe.Pointer(iter.m.typ))
1860 ktype := t.key
1861
1862 iter.m.mustBeExported()
1863 key := Value{ktype, iterkey, iter.m.flag | flag(ktype.Kind()) | flagIndir}
1864 key = key.assignTo("reflect.MapIter.SetKey", v.typ, target)
1865 typedmemmove(v.typ, v.ptr, key.ptr)
1866 }
1867
1868
1869 func (iter *MapIter) Value() Value {
1870 if !iter.hiter.initialized() {
1871 panic("MapIter.Value called before Next")
1872 }
1873 iterelem := mapiterelem(&iter.hiter)
1874 if iterelem == nil {
1875 panic("MapIter.Value called on exhausted iterator")
1876 }
1877
1878 t := (*mapType)(unsafe.Pointer(iter.m.typ))
1879 vtype := t.elem
1880 return copyVal(vtype, iter.m.flag.ro()|flag(vtype.Kind()), iterelem)
1881 }
1882
1883
1884
1885
1886
1887 func (v Value) SetIterValue(iter *MapIter) {
1888 if !iter.hiter.initialized() {
1889 panic("reflect: Value.SetIterValue called before Next")
1890 }
1891 iterelem := mapiterelem(&iter.hiter)
1892 if iterelem == nil {
1893 panic("reflect: Value.SetIterValue called on exhausted iterator")
1894 }
1895
1896 v.mustBeAssignable()
1897 var target unsafe.Pointer
1898 if v.kind() == Interface {
1899 target = v.ptr
1900 }
1901
1902 t := (*mapType)(unsafe.Pointer(iter.m.typ))
1903 vtype := t.elem
1904
1905 iter.m.mustBeExported()
1906 elem := Value{vtype, iterelem, iter.m.flag | flag(vtype.Kind()) | flagIndir}
1907 elem = elem.assignTo("reflect.MapIter.SetValue", v.typ, target)
1908 typedmemmove(v.typ, v.ptr, elem.ptr)
1909 }
1910
1911
1912
1913
1914 func (iter *MapIter) Next() bool {
1915 if !iter.m.IsValid() {
1916 panic("MapIter.Next called on an iterator that does not have an associated map Value")
1917 }
1918 if !iter.hiter.initialized() {
1919 mapiterinit(iter.m.typ, iter.m.pointer(), &iter.hiter)
1920 } else {
1921 if mapiterkey(&iter.hiter) == nil {
1922 panic("MapIter.Next called on exhausted iterator")
1923 }
1924 mapiternext(&iter.hiter)
1925 }
1926 return mapiterkey(&iter.hiter) != nil
1927 }
1928
1929
1930
1931
1932
1933 func (iter *MapIter) Reset(v Value) {
1934 if v.IsValid() {
1935 v.mustBe(Map)
1936 }
1937 iter.m = v
1938 iter.hiter = hiter{}
1939 }
1940
1941
1942
1943
1944
1945
1946
1947
1948
1949
1950
1951
1952
1953
1954
1955
1956 func (v Value) MapRange() *MapIter {
1957
1958
1959
1960
1961 if v.kind() != Map {
1962 v.panicNotMap()
1963 }
1964 return &MapIter{m: v}
1965 }
1966
1967 func (f flag) panicNotMap() {
1968 f.mustBe(Map)
1969 }
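// Editorial sketch (not part of the original source): iterating a map with
// MapRange; the map is illustrative.
//
//	m := map[string]int{"a": 1, "b": 2}
//	it := reflect.ValueOf(m).MapRange()
//	for it.Next() {
//		_ = it.Key().String()
//		_ = it.Value().Int()
//	}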
1970
1971
1972
1973 func copyVal(typ *rtype, fl flag, ptr unsafe.Pointer) Value {
1974 if ifaceIndir(typ) {
1975
1976
1977 c := unsafe_New(typ)
1978 typedmemmove(typ, c, ptr)
1979 return Value{typ, c, fl | flagIndir}
1980 }
1981 return Value{typ, *(*unsafe.Pointer)(ptr), fl}
1982 }
1983
1984
1985
1986
1987
1988 func (v Value) Method(i int) Value {
1989 if v.typ == nil {
1990 panic(&ValueError{"reflect.Value.Method", Invalid})
1991 }
1992 if v.flag&flagMethod != 0 || uint(i) >= uint(v.typ.NumMethod()) {
1993 panic("reflect: Method index out of range")
1994 }
1995 if v.typ.Kind() == Interface && v.IsNil() {
1996 panic("reflect: Method on nil interface value")
1997 }
1998 fl := v.flag.ro() | (v.flag & flagIndir)
1999 fl |= flag(Func)
2000 fl |= flag(i)<<flagMethodShift | flagMethod
2001 return Value{v.typ, v.ptr, fl}
2002 }
2003
2004
2005
2006
2007
2008
2009 func (v Value) NumMethod() int {
2010 if v.typ == nil {
2011 panic(&ValueError{"reflect.Value.NumMethod", Invalid})
2012 }
2013 if v.flag&flagMethod != 0 {
2014 return 0
2015 }
2016 return v.typ.NumMethod()
2017 }
2018
2019
2020
2021
2022
2023
2024 func (v Value) MethodByName(name string) Value {
2025 if v.typ == nil {
2026 panic(&ValueError{"reflect.Value.MethodByName", Invalid})
2027 }
2028 if v.flag&flagMethod != 0 {
2029 return Value{}
2030 }
2031 m, ok := v.typ.MethodByName(name)
2032 if !ok {
2033 return Value{}
2034 }
2035 return v.Method(m.Index)
2036 }
2037
2038
2039
2040 func (v Value) NumField() int {
2041 v.mustBe(Struct)
2042 tt := (*structType)(unsafe.Pointer(v.typ))
2043 return len(tt.fields)
2044 }
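// Editorial sketch (not part of the original source): calling a method through
// reflection. Assumes the standard time package; values are illustrative.
//
//	var t time.Time
//	v := reflect.ValueOf(t)
//	_ = v.NumMethod()                                // count of time.Time's exported methods
//	_ = v.MethodByName("IsZero").Call(nil)[0].Bool() // true for the zero time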
2045
2046
2047
2048 func (v Value) OverflowComplex(x complex128) bool {
2049 k := v.kind()
2050 switch k {
2051 case Complex64:
2052 return overflowFloat32(real(x)) || overflowFloat32(imag(x))
2053 case Complex128:
2054 return false
2055 }
2056 panic(&ValueError{"reflect.Value.OverflowComplex", v.kind()})
2057 }
2058
2059
2060
2061 func (v Value) OverflowFloat(x float64) bool {
2062 k := v.kind()
2063 switch k {
2064 case Float32:
2065 return overflowFloat32(x)
2066 case Float64:
2067 return false
2068 }
2069 panic(&ValueError{"reflect.Value.OverflowFloat", v.kind()})
2070 }
2071
2072 func overflowFloat32(x float64) bool {
2073 if x < 0 {
2074 x = -x
2075 }
2076 return math.MaxFloat32 < x && x <= math.MaxFloat64
2077 }
2078
2079
2080
2081 func (v Value) OverflowInt(x int64) bool {
2082 k := v.kind()
2083 switch k {
2084 case Int, Int8, Int16, Int32, Int64:
2085 bitSize := v.typ.size * 8
2086 trunc := (x << (64 - bitSize)) >> (64 - bitSize)
2087 return x != trunc
2088 }
2089 panic(&ValueError{"reflect.Value.OverflowInt", v.kind()})
2090 }
2091
2092
2093
2094 func (v Value) OverflowUint(x uint64) bool {
2095 k := v.kind()
2096 switch k {
2097 case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
2098 bitSize := v.typ.size * 8
2099 trunc := (x << (64 - bitSize)) >> (64 - bitSize)
2100 return x != trunc
2101 }
2102 panic(&ValueError{"reflect.Value.OverflowUint", v.kind()})
2103 }
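// Editorial sketch (not part of the original source): checking for overflow
// before storing into a narrow integer; the variable is illustrative.
//
//	var b int8
//	v := reflect.ValueOf(&b).Elem()
//	_ = v.OverflowInt(200) // true: 200 does not fit in int8
//	_ = v.OverflowInt(100) // false; v.SetInt(100) is safe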
2104
2105
2106
2107
2108
2109
2110
2111
2112
2113
2114
2115
2116
2117
2118
2119
2120
2121
2122
2123 func (v Value) Pointer() uintptr {
2124 k := v.kind()
2125 switch k {
2126 case Pointer:
2127 if v.typ.ptrdata == 0 {
2128 val := *(*uintptr)(v.ptr)
2129
2130
2131 if !verifyNotInHeapPtr(val) {
2132 panic("reflect: reflect.Value.Pointer on an invalid notinheap pointer")
2133 }
2134 return val
2135 }
2136 fallthrough
2137 case Chan, Map, UnsafePointer:
2138 return uintptr(v.pointer())
2139 case Func:
2140 if v.flag&flagMethod != 0 {
2141
2142
2143
2144
2145
2146
2147 return methodValueCallCodePtr()
2148 }
2149 p := v.pointer()
2150
2151
2152 if p != nil {
2153 p = *(*unsafe.Pointer)(p)
2154 }
2155 return uintptr(p)
2156
2157 case Slice:
2158 return uintptr((*unsafeheader.Slice)(v.ptr).Data)
2159 }
2160 panic(&ValueError{"reflect.Value.Pointer", v.kind()})
2161 }
2162
2163
2164
2165
2166
2167
2168 func (v Value) Recv() (x Value, ok bool) {
2169 v.mustBe(Chan)
2170 v.mustBeExported()
2171 return v.recv(false)
2172 }
2173
2174
2175
2176 func (v Value) recv(nb bool) (val Value, ok bool) {
2177 tt := (*chanType)(unsafe.Pointer(v.typ))
2178 if ChanDir(tt.dir)&RecvDir == 0 {
2179 panic("reflect: recv on send-only channel")
2180 }
2181 t := tt.elem
2182 val = Value{t, nil, flag(t.Kind())}
2183 var p unsafe.Pointer
2184 if ifaceIndir(t) {
2185 p = unsafe_New(t)
2186 val.ptr = p
2187 val.flag |= flagIndir
2188 } else {
2189 p = unsafe.Pointer(&val.ptr)
2190 }
2191 selected, ok := chanrecv(v.pointer(), nb, p)
2192 if !selected {
2193 val = Value{}
2194 }
2195 return
2196 }
2197
2198
2199
2200
2201 func (v Value) Send(x Value) {
2202 v.mustBe(Chan)
2203 v.mustBeExported()
2204 v.send(x, false)
2205 }
2206
2207
2208
2209 func (v Value) send(x Value, nb bool) (selected bool) {
2210 tt := (*chanType)(unsafe.Pointer(v.typ))
2211 if ChanDir(tt.dir)&SendDir == 0 {
2212 panic("reflect: send on recv-only channel")
2213 }
2214 x.mustBeExported()
2215 x = x.assignTo("reflect.Value.Send", tt.elem, nil)
2216 var p unsafe.Pointer
2217 if x.flag&flagIndir != 0 {
2218 p = x.ptr
2219 } else {
2220 p = unsafe.Pointer(&x.ptr)
2221 }
2222 return chansend(v.pointer(), p, nb)
2223 }
2224
2225
2226
2227
2228
2229 func (v Value) Set(x Value) {
2230 v.mustBeAssignable()
2231 x.mustBeExported()
2232 var target unsafe.Pointer
2233 if v.kind() == Interface {
2234 target = v.ptr
2235 }
2236 x = x.assignTo("reflect.Set", v.typ, target)
2237 if x.flag&flagIndir != 0 {
2238 if x.ptr == unsafe.Pointer(&zeroVal[0]) {
2239 typedmemclr(v.typ, v.ptr)
2240 } else {
2241 typedmemmove(v.typ, v.ptr, x.ptr)
2242 }
2243 } else {
2244 *(*unsafe.Pointer)(v.ptr) = x.ptr
2245 }
2246 }
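// Editorial sketch (not part of the original source): Set requires an
// assignable type, as enforced by assignTo above. Values are illustrative.
//
//	var dst int64
//	d := reflect.ValueOf(&dst).Elem()
//	d.Set(reflect.ValueOf(int64(5))) // ok: identical types
//	d.Set(reflect.ValueOf(5))        // panics: int is not assignable to int64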
2247
2248
2249
2250 func (v Value) SetBool(x bool) {
2251 v.mustBeAssignable()
2252 v.mustBe(Bool)
2253 *(*bool)(v.ptr) = x
2254 }
2255
2256
2257
2258 func (v Value) SetBytes(x []byte) {
2259 v.mustBeAssignable()
2260 v.mustBe(Slice)
2261 if v.typ.Elem().Kind() != Uint8 {
2262 panic("reflect.Value.SetBytes of non-byte slice")
2263 }
2264 *(*[]byte)(v.ptr) = x
2265 }
2266
2267
2268
2269 func (v Value) setRunes(x []rune) {
2270 v.mustBeAssignable()
2271 v.mustBe(Slice)
2272 if v.typ.Elem().Kind() != Int32 {
2273 panic("reflect.Value.setRunes of non-rune slice")
2274 }
2275 *(*[]rune)(v.ptr) = x
2276 }
2277
2278
2279
2280 func (v Value) SetComplex(x complex128) {
2281 v.mustBeAssignable()
2282 switch k := v.kind(); k {
2283 default:
2284 panic(&ValueError{"reflect.Value.SetComplex", v.kind()})
2285 case Complex64:
2286 *(*complex64)(v.ptr) = complex64(x)
2287 case Complex128:
2288 *(*complex128)(v.ptr) = x
2289 }
2290 }
2291
2292
2293
2294 func (v Value) SetFloat(x float64) {
2295 v.mustBeAssignable()
2296 switch k := v.kind(); k {
2297 default:
2298 panic(&ValueError{"reflect.Value.SetFloat", v.kind()})
2299 case Float32:
2300 *(*float32)(v.ptr) = float32(x)
2301 case Float64:
2302 *(*float64)(v.ptr) = x
2303 }
2304 }
2305
2306
2307
2308 func (v Value) SetInt(x int64) {
2309 v.mustBeAssignable()
2310 switch k := v.kind(); k {
2311 default:
2312 panic(&ValueError{"reflect.Value.SetInt", v.kind()})
2313 case Int:
2314 *(*int)(v.ptr) = int(x)
2315 case Int8:
2316 *(*int8)(v.ptr) = int8(x)
2317 case Int16:
2318 *(*int16)(v.ptr) = int16(x)
2319 case Int32:
2320 *(*int32)(v.ptr) = int32(x)
2321 case Int64:
2322 *(*int64)(v.ptr) = x
2323 }
2324 }
2325
2326
2327
2328
2329 func (v Value) SetLen(n int) {
2330 v.mustBeAssignable()
2331 v.mustBe(Slice)
2332 s := (*unsafeheader.Slice)(v.ptr)
2333 if uint(n) > uint(s.Cap) {
2334 panic("reflect: slice length out of range in SetLen")
2335 }
2336 s.Len = n
2337 }
2338
2339
2340
2341
2342 func (v Value) SetCap(n int) {
2343 v.mustBeAssignable()
2344 v.mustBe(Slice)
2345 s := (*unsafeheader.Slice)(v.ptr)
2346 if n < s.Len || n > s.Cap {
2347 panic("reflect: slice capacity out of range in SetCap")
2348 }
2349 s.Cap = n
2350 }
2351
2352
2353
2354
2355
2356
2357
2358 func (v Value) SetMapIndex(key, elem Value) {
2359 v.mustBe(Map)
2360 v.mustBeExported()
2361 key.mustBeExported()
2362 tt := (*mapType)(unsafe.Pointer(v.typ))
2363
2364 if (tt.key == stringType || key.kind() == String) && tt.key == key.typ && tt.elem.size <= maxValSize {
2365 k := *(*string)(key.ptr)
2366 if elem.typ == nil {
2367 mapdelete_faststr(v.typ, v.pointer(), k)
2368 return
2369 }
2370 elem.mustBeExported()
2371 elem = elem.assignTo("reflect.Value.SetMapIndex", tt.elem, nil)
2372 var e unsafe.Pointer
2373 if elem.flag&flagIndir != 0 {
2374 e = elem.ptr
2375 } else {
2376 e = unsafe.Pointer(&elem.ptr)
2377 }
2378 mapassign_faststr(v.typ, v.pointer(), k, e)
2379 return
2380 }
2381
2382 key = key.assignTo("reflect.Value.SetMapIndex", tt.key, nil)
2383 var k unsafe.Pointer
2384 if key.flag&flagIndir != 0 {
2385 k = key.ptr
2386 } else {
2387 k = unsafe.Pointer(&key.ptr)
2388 }
2389 if elem.typ == nil {
2390 mapdelete(v.typ, v.pointer(), k)
2391 return
2392 }
2393 elem.mustBeExported()
2394 elem = elem.assignTo("reflect.Value.SetMapIndex", tt.elem, nil)
2395 var e unsafe.Pointer
2396 if elem.flag&flagIndir != 0 {
2397 e = elem.ptr
2398 } else {
2399 e = unsafe.Pointer(&elem.ptr)
2400 }
2401 mapassign(v.typ, v.pointer(), k, e)
2402 }
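// Editorial sketch (not part of the original source): SetMapIndex doubles as
// insert and delete; the map is illustrative.
//
//	m := map[string]int{}
//	v := reflect.ValueOf(m)
//	v.SetMapIndex(reflect.ValueOf("a"), reflect.ValueOf(1)) // m["a"] = 1
//	v.SetMapIndex(reflect.ValueOf("a"), reflect.Value{})    // delete(m, "a")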
2403
2404
2405
2406 func (v Value) SetUint(x uint64) {
2407 v.mustBeAssignable()
2408 switch k := v.kind(); k {
2409 default:
2410 panic(&ValueError{"reflect.Value.SetUint", v.kind()})
2411 case Uint:
2412 *(*uint)(v.ptr) = uint(x)
2413 case Uint8:
2414 *(*uint8)(v.ptr) = uint8(x)
2415 case Uint16:
2416 *(*uint16)(v.ptr) = uint16(x)
2417 case Uint32:
2418 *(*uint32)(v.ptr) = uint32(x)
2419 case Uint64:
2420 *(*uint64)(v.ptr) = x
2421 case Uintptr:
2422 *(*uintptr)(v.ptr) = uintptr(x)
2423 }
2424 }
2425
2426
2427
2428 func (v Value) SetPointer(x unsafe.Pointer) {
2429 v.mustBeAssignable()
2430 v.mustBe(UnsafePointer)
2431 *(*unsafe.Pointer)(v.ptr) = x
2432 }
2433
2434
2435
2436 func (v Value) SetString(x string) {
2437 v.mustBeAssignable()
2438 v.mustBe(String)
2439 *(*string)(v.ptr) = x
2440 }
2441
2442
2443
2444
2445 func (v Value) Slice(i, j int) Value {
2446 var (
2447 cap int
2448 typ *sliceType
2449 base unsafe.Pointer
2450 )
2451 switch kind := v.kind(); kind {
2452 default:
2453 panic(&ValueError{"reflect.Value.Slice", v.kind()})
2454
2455 case Array:
2456 if v.flag&flagAddr == 0 {
2457 panic("reflect.Value.Slice: slice of unaddressable array")
2458 }
2459 tt := (*arrayType)(unsafe.Pointer(v.typ))
2460 cap = int(tt.len)
2461 typ = (*sliceType)(unsafe.Pointer(tt.slice))
2462 base = v.ptr
2463
2464 case Slice:
2465 typ = (*sliceType)(unsafe.Pointer(v.typ))
2466 s := (*unsafeheader.Slice)(v.ptr)
2467 base = s.Data
2468 cap = s.Cap
2469
2470 case String:
2471 s := (*unsafeheader.String)(v.ptr)
2472 if i < 0 || j < i || j > s.Len {
2473 panic("reflect.Value.Slice: string slice index out of bounds")
2474 }
2475 var t unsafeheader.String
2476 if i < s.Len {
2477 t = unsafeheader.String{Data: arrayAt(s.Data, i, 1, "i < s.Len"), Len: j - i}
2478 }
2479 return Value{v.typ, unsafe.Pointer(&t), v.flag}
2480 }
2481
2482 if i < 0 || j < i || j > cap {
2483 panic("reflect.Value.Slice: slice index out of bounds")
2484 }
2485
2486
2487 var x []unsafe.Pointer
2488
2489
2490 s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
2491 s.Len = j - i
2492 s.Cap = cap - i
2493 if cap-i > 0 {
2494 s.Data = arrayAt(base, i, typ.elem.Size(), "i < cap")
2495 } else {
2496
2497 s.Data = base
2498 }
2499
2500 fl := v.flag.ro() | flagIndir | flag(Slice)
2501 return Value{typ.common(), unsafe.Pointer(&x), fl}
2502 }
2503
2504
2505
2506
2507 func (v Value) Slice3(i, j, k int) Value {
2508 var (
2509 cap int
2510 typ *sliceType
2511 base unsafe.Pointer
2512 )
2513 switch kind := v.kind(); kind {
2514 default:
2515 panic(&ValueError{"reflect.Value.Slice3", v.kind()})
2516
2517 case Array:
2518 if v.flag&flagAddr == 0 {
2519 panic("reflect.Value.Slice3: slice of unaddressable array")
2520 }
2521 tt := (*arrayType)(unsafe.Pointer(v.typ))
2522 cap = int(tt.len)
2523 typ = (*sliceType)(unsafe.Pointer(tt.slice))
2524 base = v.ptr
2525
2526 case Slice:
2527 typ = (*sliceType)(unsafe.Pointer(v.typ))
2528 s := (*unsafeheader.Slice)(v.ptr)
2529 base = s.Data
2530 cap = s.Cap
2531 }
2532
2533 if i < 0 || j < i || k < j || k > cap {
2534 panic("reflect.Value.Slice3: slice index out of bounds")
2535 }
2536
2537
2538
2539 var x []unsafe.Pointer
2540
2541
2542 s := (*unsafeheader.Slice)(unsafe.Pointer(&x))
2543 s.Len = j - i
2544 s.Cap = k - i
2545 if k-i > 0 {
2546 s.Data = arrayAt(base, i, typ.elem.Size(), "i < k <= cap")
2547 } else {
2548
2549 s.Data = base
2550 }
2551
2552 fl := v.flag.ro() | flagIndir | flag(Slice)
2553 return Value{typ.common(), unsafe.Pointer(&x), fl}
2554 }
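// Editorial sketch (not part of the original source): Slice and Slice3 on a
// slice value; values are illustrative.
//
//	s := []int{1, 2, 3, 4}
//	v := reflect.ValueOf(s)
//	_ = v.Slice(1, 3).Len()     // 2; the result shares s's backing array
//	_ = v.Slice3(1, 2, 3).Cap() // 2; the third index caps capacity at k-i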
2555
2556
2557
2558
2559
2560
2561
2562 func (v Value) String() string {
2563
2564 if v.kind() == String {
2565 return *(*string)(v.ptr)
2566 }
2567 return v.stringNonString()
2568 }
2569
2570 func (v Value) stringNonString() string {
2571 if v.kind() == Invalid {
2572 return "<invalid Value>"
2573 }
2574
2575
2576 return "<" + v.Type().String() + " Value>"
2577 }
2578
2579
2580
2581
2582
2583
2584 func (v Value) TryRecv() (x Value, ok bool) {
2585 v.mustBe(Chan)
2586 v.mustBeExported()
2587 return v.recv(true)
2588 }
2589
2590
2591
2592
2593
2594 func (v Value) TrySend(x Value) bool {
2595 v.mustBe(Chan)
2596 v.mustBeExported()
2597 return v.send(x, true)
2598 }
2599
2600
2601 func (v Value) Type() Type {
2602 if v.flag != 0 && v.flag&flagMethod == 0 {
2603 return v.typ
2604 }
2605 return v.typeSlow()
2606 }
2607
2608 func (v Value) typeSlow() Type {
2609 if v.flag == 0 {
2610 panic(&ValueError{"reflect.Value.Type", Invalid})
2611 }
2612 if v.flag&flagMethod == 0 {
2613 return v.typ
2614 }
2615
2616
2617
2618 i := int(v.flag) >> flagMethodShift
2619 if v.typ.Kind() == Interface {
2620
2621 tt := (*interfaceType)(unsafe.Pointer(v.typ))
2622 if uint(i) >= uint(len(tt.methods)) {
2623 panic("reflect: internal error: invalid method index")
2624 }
2625 m := &tt.methods[i]
2626 return v.typ.typeOff(m.typ)
2627 }
2628
2629 ms := v.typ.exportedMethods()
2630 if uint(i) >= uint(len(ms)) {
2631 panic("reflect: internal error: invalid method index")
2632 }
2633 m := ms[i]
2634 return v.typ.typeOff(m.mtyp)
2635 }
2636
2637
2638 func (v Value) CanUint() bool {
2639 switch v.kind() {
2640 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
2641 return true
2642 default:
2643 return false
2644 }
2645 }
2646
2647
2648
2649 func (v Value) Uint() uint64 {
2650 k := v.kind()
2651 p := v.ptr
2652 switch k {
2653 case Uint:
2654 return uint64(*(*uint)(p))
2655 case Uint8:
2656 return uint64(*(*uint8)(p))
2657 case Uint16:
2658 return uint64(*(*uint16)(p))
2659 case Uint32:
2660 return uint64(*(*uint32)(p))
2661 case Uint64:
2662 return *(*uint64)(p)
2663 case Uintptr:
2664 return uint64(*(*uintptr)(p))
2665 }
2666 panic(&ValueError{"reflect.Value.Uint", v.kind()})
2667 }
2668
2669
2670
2671
2672
2673
2674
2675
2676
2677
2678 func (v Value) UnsafeAddr() uintptr {
2679 if v.typ == nil {
2680 panic(&ValueError{"reflect.Value.UnsafeAddr", Invalid})
2681 }
2682 if v.flag&flagAddr == 0 {
2683 panic("reflect.Value.UnsafeAddr of unaddressable value")
2684 }
2685 return uintptr(v.ptr)
2686 }
2687
2688
2689
2690
2691
2692
2693
2694
2695
2696
2697
2698
2699 func (v Value) UnsafePointer() unsafe.Pointer {
2700 k := v.kind()
2701 switch k {
2702 case Pointer:
2703 if v.typ.ptrdata == 0 {
2704
2705
2706 if !verifyNotInHeapPtr(*(*uintptr)(v.ptr)) {
2707 panic("reflect: reflect.Value.UnsafePointer on an invalid notinheap pointer")
2708 }
2709 return *(*unsafe.Pointer)(v.ptr)
2710 }
2711 fallthrough
2712 case Chan, Map, UnsafePointer:
2713 return v.pointer()
2714 case Func:
2715 if v.flag&flagMethod != 0 {
2716
2717
2718
2719
2720
2721
2722 code := methodValueCallCodePtr()
2723 return *(*unsafe.Pointer)(unsafe.Pointer(&code))
2724 }
2725 p := v.pointer()
2726
2727
2728 if p != nil {
2729 p = *(*unsafe.Pointer)(p)
2730 }
2731 return p
2732
2733 case Slice:
2734 return (*unsafeheader.Slice)(v.ptr).Data
2735 }
2736 panic(&ValueError{"reflect.Value.UnsafePointer", v.kind()})
2737 }
2738
2739
2740
2741
2742
2743
2744
2745
2746
2747 type StringHeader struct {
2748 Data uintptr
2749 Len int
2750 }
2751
2752
2753
2754
2755
2756
2757
2758
2759
2760 type SliceHeader struct {
2761 Data uintptr
2762 Len int
2763 Cap int
2764 }
2765
2766 func typesMustMatch(what string, t1, t2 Type) {
2767 if t1 != t2 {
2768 panic(what + ": " + t1.String() + " != " + t2.String())
2769 }
2770 }
2771
2772
2773
2774
2775
2776
2777
2778
2779 func arrayAt(p unsafe.Pointer, i int, eltSize uintptr, whySafe string) unsafe.Pointer {
2780 return add(p, uintptr(i)*eltSize, "i < len")
2781 }
2782
2783
2784
2785
2786
2787
2788
2789 func (v Value) Grow(n int) {
2790 v.mustBeAssignable()
2791 v.mustBe(Slice)
2792 v.grow(n)
2793 }
2794
2795
2796 func (v Value) grow(n int) {
2797 p := (*unsafeheader.Slice)(v.ptr)
2798 switch {
2799 case n < 0:
2800 panic("reflect.Value.Grow: negative len")
2801 case p.Len+n < 0:
2802 panic("reflect.Value.Grow: slice overflow")
2803 case p.Len+n > p.Cap:
2804 t := v.typ.Elem().(*rtype)
2805 *p = growslice(t, *p, n)
2806 }
2807 }
2808
2809
2810
2811
2812
2813
2814
2815 func (v Value) extendSlice(n int) Value {
2816 v.mustBeExported()
2817 v.mustBe(Slice)
2818
2819
2820 sh := *(*unsafeheader.Slice)(v.ptr)
2821 s := &sh
2822 v.ptr = unsafe.Pointer(s)
2823 v.flag = flagIndir | flag(Slice)
2824
2825 v.grow(n)
2826 s.Len += n
2827 return v
2828 }
2829
2830 // Append appends the values x to a slice s and returns the resulting slice.
2831 // As in Go, each x's value must be assignable to the slice's element type.
2832 func Append(s Value, x ...Value) Value {
2833 s.mustBe(Slice)
2834 n := s.Len()
2835 s = s.extendSlice(len(x))
2836 for i, v := range x {
2837 s.Index(n + i).Set(v)
2838 }
2839 return s
2840 }
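// A usage sketch, assuming a caller that imports "fmt" and "reflect";
// Append is the reflect analogue of the built-in append:
//
//	s := reflect.ValueOf([]int{1, 2})
//	s = reflect.Append(s, reflect.ValueOf(3), reflect.ValueOf(4))
//	fmt.Println(s.Interface()) // [1 2 3 4]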
2841
2842 // AppendSlice appends a slice t to a slice s and returns the resulting slice.
2843 // The slices s and t must have the same element type.
2844 func AppendSlice(s, t Value) Value {
2845 s.mustBe(Slice)
2846 t.mustBe(Slice)
2847 typesMustMatch("reflect.AppendSlice", s.Type().Elem(), t.Type().Elem())
2848 ns := s.Len()
2849 nt := t.Len()
2850 s = s.extendSlice(nt)
2851 Copy(s.Slice(ns, ns+nt), t)
2852 return s
2853 }
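// A usage sketch, assuming a caller that imports "fmt" and "reflect";
// AppendSlice corresponds to append(s, t...):
//
//	s := reflect.ValueOf([]string{"a"})
//	t := reflect.ValueOf([]string{"b", "c"})
//	fmt.Println(reflect.AppendSlice(s, t).Interface()) // [a b c]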
2854
2855 // Copy copies the contents of src into dst until either
2856 // dst has been filled or src has been exhausted.
2857 // It returns the number of elements copied.
2858 // Dst and src each must have kind Slice or Array, and
2859 // dst and src must have the same element type.
2860 //
2861 // As a special case, src can have kind String if the element type of dst is kind Uint8.
2862 func Copy(dst, src Value) int {
2863 dk := dst.kind()
2864 if dk != Array && dk != Slice {
2865 panic(&ValueError{"reflect.Copy", dk})
2866 }
2867 if dk == Array {
2868 dst.mustBeAssignable()
2869 }
2870 dst.mustBeExported()
2871
2872 sk := src.kind()
2873 var stringCopy bool
2874 if sk != Array && sk != Slice {
2875 stringCopy = sk == String && dst.typ.Elem().Kind() == Uint8
2876 if !stringCopy {
2877 panic(&ValueError{"reflect.Copy", sk})
2878 }
2879 }
2880 src.mustBeExported()
2881
2882 de := dst.typ.Elem()
2883 if !stringCopy {
2884 se := src.typ.Elem()
2885 typesMustMatch("reflect.Copy", de, se)
2886 }
2887
2888 var ds, ss unsafeheader.Slice
2889 if dk == Array {
2890 ds.Data = dst.ptr
2891 ds.Len = dst.Len()
2892 ds.Cap = ds.Len
2893 } else {
2894 ds = *(*unsafeheader.Slice)(dst.ptr)
2895 }
2896 if sk == Array {
2897 ss.Data = src.ptr
2898 ss.Len = src.Len()
2899 ss.Cap = ss.Len
2900 } else if sk == Slice {
2901 ss = *(*unsafeheader.Slice)(src.ptr)
2902 } else {
2903 sh := *(*unsafeheader.String)(src.ptr)
2904 ss.Data = sh.Data
2905 ss.Len = sh.Len
2906 ss.Cap = sh.Len
2907 }
2908
2909 return typedslicecopy(de.common(), ds, ss)
2910 }
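// A sketch of the two common shapes of Copy, slice-to-slice and
// string-to-[]byte, assuming a caller that imports "fmt" and "reflect":
//
//	dst := make([]byte, 3)
//	n := reflect.Copy(reflect.ValueOf(dst), reflect.ValueOf("hello"))
//	fmt.Println(n, string(dst)) // 3 hel
//
//	ints := make([]int, 2)
//	reflect.Copy(reflect.ValueOf(ints), reflect.ValueOf([]int{7, 8, 9}))
//	fmt.Println(ints) // [7 8]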
2911
2912 // A runtimeSelect is a single case passed to rselect.
2913 // This must match ../runtime/select.go:/runtimeSelect
2914 type runtimeSelect struct {
2915 dir SelectDir
2916 typ *rtype
2917 ch unsafe.Pointer
2918 val unsafe.Pointer
2919 }
2920
2921
2922
2923
2924
2925
2926
2927 func rselect([]runtimeSelect) (chosen int, recvOK bool)
2928
2929 // A SelectDir describes the communication direction of a select case.
2930 type SelectDir int
2931
2932
2933
2934 const (
2935 _ SelectDir = iota
2936 SelectSend
2937 SelectRecv
2938 SelectDefault
2939 )
2940
2941 // A SelectCase describes a single case in a select operation.
2942 // The kind of case depends on Dir, the communication direction.
2943 //
2944 // If Dir is SelectDefault, the case represents a default case.
2945 // Chan and Send must be zero Values.
2946 //
2947 // If Dir is SelectSend, the case represents a send operation.
2948 // Normally Chan's underlying value must be a channel, and Send's underlying value must be
2949 // assignable to the channel's element type. As a special case, if Chan is a zero Value,
2950 // then the case is ignored, and the field Send will also be ignored and may be either zero
2951 // or non-zero.
2952 //
2953 // If Dir is SelectRecv, the case represents a receive operation.
2954 // Normally Chan's underlying value must be a channel and Send must be a zero Value.
2955 // If Chan is a zero Value, then the case is ignored, but Send must still be a zero Value.
2956 // When a receive operation is selected, the received Value is returned by Select.
2957 type SelectCase struct {
2958 Dir SelectDir
2959 Chan Value
2960 Send Value
2961 }
2962
2963 // Select executes a select operation described by the list of cases.
2964 // Like the Go select statement, it blocks until at least one of the cases
2965 // can proceed, makes a uniform pseudo-random choice,
2966 // and then executes that case. It returns the index of the chosen case
2967 // and, if that case was a receive operation, the value received and a
2968 // boolean indicating whether the value corresponds to a send on the channel
2969 // (as opposed to a zero value received because the channel is closed).
2970 // Select supports a maximum of 65536 cases.
2971 func Select(cases []SelectCase) (chosen int, recv Value, recvOK bool) {
2972 if len(cases) > 65536 {
2973 panic("reflect.Select: too many cases (max 65536)")
2974 }
2975
2976
2977
2978 var runcases []runtimeSelect
2979 if len(cases) > 4 {
2980
2981 runcases = make([]runtimeSelect, len(cases))
2982 } else {
2983
2984 runcases = make([]runtimeSelect, len(cases), 4)
2985 }
2986
2987 haveDefault := false
2988 for i, c := range cases {
2989 rc := &runcases[i]
2990 rc.dir = c.Dir
2991 switch c.Dir {
2992 default:
2993 panic("reflect.Select: invalid Dir")
2994
2995 case SelectDefault:
2996 if haveDefault {
2997 panic("reflect.Select: multiple default cases")
2998 }
2999 haveDefault = true
3000 if c.Chan.IsValid() {
3001 panic("reflect.Select: default case has Chan value")
3002 }
3003 if c.Send.IsValid() {
3004 panic("reflect.Select: default case has Send value")
3005 }
3006
3007 case SelectSend:
3008 ch := c.Chan
3009 if !ch.IsValid() {
3010 break
3011 }
3012 ch.mustBe(Chan)
3013 ch.mustBeExported()
3014 tt := (*chanType)(unsafe.Pointer(ch.typ))
3015 if ChanDir(tt.dir)&SendDir == 0 {
3016 panic("reflect.Select: SendDir case using recv-only channel")
3017 }
3018 rc.ch = ch.pointer()
3019 rc.typ = &tt.rtype
3020 v := c.Send
3021 if !v.IsValid() {
3022 panic("reflect.Select: SendDir case missing Send value")
3023 }
3024 v.mustBeExported()
3025 v = v.assignTo("reflect.Select", tt.elem, nil)
3026 if v.flag&flagIndir != 0 {
3027 rc.val = v.ptr
3028 } else {
3029 rc.val = unsafe.Pointer(&v.ptr)
3030 }
3031
3032 case SelectRecv:
3033 if c.Send.IsValid() {
3034 panic("reflect.Select: RecvDir case has Send value")
3035 }
3036 ch := c.Chan
3037 if !ch.IsValid() {
3038 break
3039 }
3040 ch.mustBe(Chan)
3041 ch.mustBeExported()
3042 tt := (*chanType)(unsafe.Pointer(ch.typ))
3043 if ChanDir(tt.dir)&RecvDir == 0 {
3044 panic("reflect.Select: RecvDir case using send-only channel")
3045 }
3046 rc.ch = ch.pointer()
3047 rc.typ = &tt.rtype
3048 rc.val = unsafe_New(tt.elem)
3049 }
3050 }
3051
3052 chosen, recvOK = rselect(runcases)
3053 if runcases[chosen].dir == SelectRecv {
3054 tt := (*chanType)(unsafe.Pointer(runcases[chosen].typ))
3055 t := tt.elem
3056 p := runcases[chosen].val
3057 fl := flag(t.Kind())
3058 if ifaceIndir(t) {
3059 recv = Value{t, p, fl | flagIndir}
3060 } else {
3061 recv = Value{t, *(*unsafe.Pointer)(p), fl}
3062 }
3063 }
3064 return chosen, recv, recvOK
3065 }
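// A minimal sketch of a dynamic select with one receive case and a default,
// assuming a caller that imports "fmt" and "reflect":
//
//	ch := make(chan int, 1)
//	ch <- 42
//	cases := []reflect.SelectCase{
//		{Dir: reflect.SelectRecv, Chan: reflect.ValueOf(ch)},
//		{Dir: reflect.SelectDefault},
//	}
//	chosen, recv, ok := reflect.Select(cases)
//	fmt.Println(chosen, recv.Int(), ok) // 0 42 true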
3066
3067
3070
3071
3072 func unsafe_New(*rtype) unsafe.Pointer
3073 func unsafe_NewArray(*rtype, int) unsafe.Pointer
3074
3075 // MakeSlice creates a new zero-initialized slice value
3076 // for the specified slice type, length, and capacity.
3077 func MakeSlice(typ Type, len, cap int) Value {
3078 if typ.Kind() != Slice {
3079 panic("reflect.MakeSlice of non-slice type")
3080 }
3081 if len < 0 {
3082 panic("reflect.MakeSlice: negative len")
3083 }
3084 if cap < 0 {
3085 panic("reflect.MakeSlice: negative cap")
3086 }
3087 if len > cap {
3088 panic("reflect.MakeSlice: len > cap")
3089 }
3090
3091 s := unsafeheader.Slice{Data: unsafe_NewArray(typ.Elem().(*rtype), cap), Len: len, Cap: cap}
3092 return Value{typ.(*rtype), unsafe.Pointer(&s), flagIndir | flag(Slice)}
3093 }
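// A usage sketch, assuming a caller that imports "fmt" and "reflect":
//
//	t := reflect.TypeOf([]float64(nil))
//	v := reflect.MakeSlice(t, 2, 8)
//	fmt.Println(v.Len(), v.Cap(), v.Interface()) // 2 8 [0 0]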
3094
3095 // MakeChan creates a new channel with the specified type and buffer size.
3096 func MakeChan(typ Type, buffer int) Value {
3097 if typ.Kind() != Chan {
3098 panic("reflect.MakeChan of non-chan type")
3099 }
3100 if buffer < 0 {
3101 panic("reflect.MakeChan: negative buffer size")
3102 }
3103 if typ.ChanDir() != BothDir {
3104 panic("reflect.MakeChan: unidirectional channel type")
3105 }
3106 t := typ.(*rtype)
3107 ch := makechan(t, buffer)
3108 return Value{t, ch, flag(Chan)}
3109 }
3110
3111 // MakeMap creates a new map with the specified type.
3112 func MakeMap(typ Type) Value {
3113 return MakeMapWithSize(typ, 0)
3114 }
3115
3116 // MakeMapWithSize creates a new map with the specified type
3117 // and initial space for approximately n elements.
3118 func MakeMapWithSize(typ Type, n int) Value {
3119 if typ.Kind() != Map {
3120 panic("reflect.MakeMapWithSize of non-map type")
3121 }
3122 t := typ.(*rtype)
3123 m := makemap(t, n)
3124 return Value{t, m, flag(Map)}
3125 }
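// A usage sketch combining MakeMapWithSize with SetMapIndex, assuming a
// caller that imports "fmt" and "reflect":
//
//	t := reflect.TypeOf(map[string]int(nil))
//	m := reflect.MakeMapWithSize(t, 10) // room for about 10 entries
//	m.SetMapIndex(reflect.ValueOf("a"), reflect.ValueOf(1))
//	fmt.Println(m.Interface()) // map[a:1]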
3126
3127 // Indirect returns the value that v points to.
3128 // If v is a nil pointer, Indirect returns a zero Value.
3129 // If v is not a pointer, Indirect returns v.
3130 func Indirect(v Value) Value {
3131 if v.Kind() != Pointer {
3132 return v
3133 }
3134 return v.Elem()
3135 }
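// A minimal sketch, assuming a caller that imports "fmt" and "reflect":
// Indirect follows a pointer if there is one and passes other values through.
//
//	x := 7
//	fmt.Println(reflect.Indirect(reflect.ValueOf(&x)).Int()) // 7
//	fmt.Println(reflect.Indirect(reflect.ValueOf(x)).Int())  // 7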
3136
3137 // ValueOf returns a new Value initialized to the concrete value
3138 // stored in the interface i. ValueOf(nil) returns the zero Value.
3139 func ValueOf(i any) Value {
3140 if i == nil {
3141 return Value{}
3142 }
3143
3144
3145
3146
3147
3148 escapes(i)
3149
3150 return unpackEface(i)
3151 }
3152
3153 // Zero returns a Value representing the zero value for the specified type.
3154 // The result is different from the zero Value of the Value struct,
3155 // which represents no value at all.
3156 // For example, Zero(TypeOf(42)) returns a Value with Kind Int and value 0.
3157 // The returned value is neither addressable nor settable.
3158 func Zero(typ Type) Value {
3159 if typ == nil {
3160 panic("reflect: Zero(nil)")
3161 }
3162 t := typ.(*rtype)
3163 fl := flag(t.Kind())
3164 if ifaceIndir(t) {
3165 var p unsafe.Pointer
3166 if t.size <= maxZero {
3167 p = unsafe.Pointer(&zeroVal[0])
3168 } else {
3169 p = unsafe_New(t)
3170 }
3171 return Value{t, p, fl | flagIndir}
3172 }
3173 return Value{t, nil, fl}
3174 }
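// A short sketch contrasting Zero with the zero Value, assuming a caller
// that imports "fmt" and "reflect":
//
//	z := reflect.Zero(reflect.TypeOf(42))
//	fmt.Println(z.IsValid(), z.Int()) // true 0
//
//	var none reflect.Value // represents no value at all
//	fmt.Println(none.IsValid()) // false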
3175
3176 // must match declarations in runtime/map.go's maxZero
3177 const maxZero = 1024
3178
3179
3180 var zeroVal [maxZero]byte
3181
3182 // New returns a Value representing a pointer to a new zero value
3183 // for the specified type. That is, the returned Value's Type is PointerTo(typ).
3184 func New(typ Type) Value {
3185 if typ == nil {
3186 panic("reflect: New(nil)")
3187 }
3188 t := typ.(*rtype)
3189 pt := t.ptrTo()
3190 if ifaceIndir(pt) {
3191 // This is a pointer to a go:notinheap type.
3192 panic("reflect: New of type that may not be allocated in heap (possibly undefined cgo C type)")
3193 }
3194 ptr := unsafe_New(t)
3195 fl := flag(Pointer)
3196 return Value{pt, ptr, fl}
3197 }
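// A minimal sketch, assuming a caller that imports "fmt" and "reflect";
// New is the reflect analogue of new(T):
//
//	p := reflect.New(reflect.TypeOf(0)) // type *int, Kind Pointer
//	p.Elem().SetInt(5)
//	fmt.Println(*p.Interface().(*int)) // 5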
3198
3199 // NewAt returns a Value representing a pointer to a value of the
3200 // specified type, using p as that pointer.
3201 func NewAt(typ Type, p unsafe.Pointer) Value {
3202 fl := flag(Pointer)
3203 t := typ.(*rtype)
3204 return Value{t.ptrTo(), p, fl}
3205 }
3206
3207 // assignTo returns a value v that can be assigned directly to dst.
3208 // It panics if v is not assignable to dst.
3209 // For a conversion to an interface type, target, if not nil,
3210 // is a suggested scratch space to use.
3211 // target must be initialized memory (or nil).
3212 func (v Value) assignTo(context string, dst *rtype, target unsafe.Pointer) Value {
3213 if v.flag&flagMethod != 0 {
3214 v = makeMethodValue(context, v)
3215 }
3216
3217 switch {
3218 case directlyAssignable(dst, v.typ):
3219
3220
3221 fl := v.flag&(flagAddr|flagIndir) | v.flag.ro()
3222 fl |= flag(dst.Kind())
3223 return Value{dst, v.ptr, fl}
3224
3225 case implements(dst, v.typ):
3226 if v.Kind() == Interface && v.IsNil() {
3227
3228
3229
3230 return Value{dst, nil, flag(Interface)}
3231 }
3232 x := valueInterface(v, false)
3233 if target == nil {
3234 target = unsafe_New(dst)
3235 }
3236 if dst.NumMethod() == 0 {
3237 *(*any)(target) = x
3238 } else {
3239 ifaceE2I(dst, x, target)
3240 }
3241 return Value{dst, target, flagIndir | flag(Interface)}
3242 }
3243
3244
3245 panic(context + ": value of type " + v.typ.String() + " is not assignable to type " + dst.String())
3246 }
3247
3248 // Convert returns the value v converted to type t.
3249 // If the usual Go conversion rules do not allow conversion of the value v
3250 // to type t, or if converting v to type t panics, Convert panics.
3251 func (v Value) Convert(t Type) Value {
3252 if v.flag&flagMethod != 0 {
3253 v = makeMethodValue("Convert", v)
3254 }
3255 op := convertOp(t.common(), v.typ)
3256 if op == nil {
3257 panic("reflect.Value.Convert: value of type " + v.typ.String() + " cannot be converted to type " + t.String())
3258 }
3259 return op(v, t)
3260 }
3261
3262 // CanConvert reports whether the value v can be converted to type t.
3263 // If v.CanConvert(t) returns true then v.Convert(t) will not panic.
3264 func (v Value) CanConvert(t Type) bool {
3265 vt := v.Type()
3266 if !vt.ConvertibleTo(t) {
3267 return false
3268 }
3269 // Converting from slice to array or to pointer-to-array can panic
3270 // depending on the length.
3271 switch {
3272 case vt.Kind() == Slice && t.Kind() == Array:
3273 if t.Len() > v.Len() {
3274 return false
3275 }
3276 case vt.Kind() == Slice && t.Kind() == Pointer && t.Elem().Kind() == Array:
3277 n := t.Elem().Len()
3278 if n > v.Len() {
3279 return false
3280 }
3281 }
3282 return true
3283 }
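// A sketch of guarding a conversion that can panic at run time (slice to
// array with too few elements), assuming a caller that imports "fmt" and
// "reflect":
//
//	v := reflect.ValueOf([]byte{1, 2, 3})
//	t := reflect.TypeOf([4]byte{})
//	if v.CanConvert(t) {
//		fmt.Println(v.Convert(t))
//	} else {
//		fmt.Println("slice too short for", t) // taken: len 3 < 4
//	}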
3284
3285 // Comparable reports whether the value v is comparable.
3286 // If the type of v is an interface, this checks the dynamic type.
3287 // If this reports true then v.Interface() == x will not panic for any x,
3288 // nor will v.Equal(u) for any Value u.
3289 func (v Value) Comparable() bool {
3290 k := v.Kind()
3291 switch k {
3292 case Invalid:
3293 return false
3294
3295 case Array:
3296 switch v.Type().Elem().Kind() {
3297 case Interface, Array, Struct:
3298 for i := 0; i < v.Type().Len(); i++ {
3299 if !v.Index(i).Comparable() {
3300 return false
3301 }
3302 }
3303 return true
3304 }
3305 return v.Type().Comparable()
3306
3307 case Interface:
3308 return v.Elem().Comparable()
3309
3310 case Struct:
3311 for i := 0; i < v.NumField(); i++ {
3312 if !v.Field(i).Comparable() {
3313 return false
3314 }
3315 }
3316 return true
3317
3318 default:
3319 return v.Type().Comparable()
3320 }
3321 }
3322
3323 // Equal reports true if v is equal to u.
3324 // If v or u is an interface value, Equal compares its dynamic value.
3325 // If either value is invalid, it reports whether both are invalid.
3326 // For values of different types, it reports false.
3327 // For arrays and structs, Equal compares the corresponding
3328 // elements and fields in order.
3329 // Equal panics if the values have a non-comparable type,
3330 // such as func, map, or slice.
3331 func (v Value) Equal(u Value) bool {
3332 if v.Kind() == Interface {
3333 v = v.Elem()
3334 }
3335 if u.Kind() == Interface {
3336 u = u.Elem()
3337 }
3338
3339 if !v.IsValid() || !u.IsValid() {
3340 return v.IsValid() == u.IsValid()
3341 }
3342
3343 if v.Kind() != u.Kind() || v.Type() != u.Type() {
3344 return false
3345 }
3346
3347
3348
3349 switch v.Kind() {
3350 default:
3351 panic("reflect.Value.Equal: invalid Kind")
3352 case Bool:
3353 return v.Bool() == u.Bool()
3354 case Int, Int8, Int16, Int32, Int64:
3355 return v.Int() == u.Int()
3356 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3357 return v.Uint() == u.Uint()
3358 case Float32, Float64:
3359 return v.Float() == u.Float()
3360 case Complex64, Complex128:
3361 return v.Complex() == u.Complex()
3362 case String:
3363 return v.String() == u.String()
3364 case Chan, Pointer, UnsafePointer:
3365 return v.Pointer() == u.Pointer()
3366 case Array:
3367
3368 vl := v.Len()
3369 if vl == 0 {
3370
3371 if !v.Type().Elem().Comparable() {
3372 break
3373 }
3374 return true
3375 }
3376 for i := 0; i < vl; i++ {
3377 if !v.Index(i).Equal(u.Index(i)) {
3378 return false
3379 }
3380 }
3381 return true
3382 case Struct:
3383
3384 nf := v.NumField()
3385 for i := 0; i < nf; i++ {
3386 if !v.Field(i).Equal(u.Field(i)) {
3387 return false
3388 }
3389 }
3390 return true
3391 case Func, Map, Slice:
3392 break
3393 }
3394 panic("reflect.Value.Equal: values of type " + v.Type().String() + " are not comparable")
3395 }
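// A short sketch, assuming a caller that imports "fmt" and "reflect":
// Equal behaves like ==, after unwrapping interfaces, and panics for
// non-comparable values such as slices.
//
//	a := reflect.ValueOf([2]string{"x", "y"})
//	b := reflect.ValueOf([2]string{"x", "y"})
//	fmt.Println(a.Equal(b)) // true
//
//	s := reflect.ValueOf([]int{1})
//	_ = s.Equal(s) // panics: values of type []int are not comparable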
3396
3397 // convertOp returns the function to convert a value of type src
3398 // to a value of type dst. If the conversion is illegal, convertOp returns nil.
3399 func convertOp(dst, src *rtype) func(Value, Type) Value {
3400 switch src.Kind() {
3401 case Int, Int8, Int16, Int32, Int64:
3402 switch dst.Kind() {
3403 case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3404 return cvtInt
3405 case Float32, Float64:
3406 return cvtIntFloat
3407 case String:
3408 return cvtIntString
3409 }
3410
3411 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3412 switch dst.Kind() {
3413 case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3414 return cvtUint
3415 case Float32, Float64:
3416 return cvtUintFloat
3417 case String:
3418 return cvtUintString
3419 }
3420
3421 case Float32, Float64:
3422 switch dst.Kind() {
3423 case Int, Int8, Int16, Int32, Int64:
3424 return cvtFloatInt
3425 case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
3426 return cvtFloatUint
3427 case Float32, Float64:
3428 return cvtFloat
3429 }
3430
3431 case Complex64, Complex128:
3432 switch dst.Kind() {
3433 case Complex64, Complex128:
3434 return cvtComplex
3435 }
3436
3437 case String:
3438 if dst.Kind() == Slice && dst.Elem().PkgPath() == "" {
3439 switch dst.Elem().Kind() {
3440 case Uint8:
3441 return cvtStringBytes
3442 case Int32:
3443 return cvtStringRunes
3444 }
3445 }
3446
3447 case Slice:
3448 if dst.Kind() == String && src.Elem().PkgPath() == "" {
3449 switch src.Elem().Kind() {
3450 case Uint8:
3451 return cvtBytesString
3452 case Int32:
3453 return cvtRunesString
3454 }
3455 }
3456
3457
3458 if dst.Kind() == Pointer && dst.Elem().Kind() == Array && src.Elem() == dst.Elem().Elem() {
3459 return cvtSliceArrayPtr
3460 }
3461
3462
3463 if dst.Kind() == Array && src.Elem() == dst.Elem() {
3464 return cvtSliceArray
3465 }
3466
3467 case Chan:
3468 if dst.Kind() == Chan && specialChannelAssignability(dst, src) {
3469 return cvtDirect
3470 }
3471 }
3472
3473
3474 if haveIdenticalUnderlyingType(dst, src, false) {
3475 return cvtDirect
3476 }
3477
3478
3479 if dst.Kind() == Pointer && dst.Name() == "" &&
3480 src.Kind() == Pointer && src.Name() == "" &&
3481 haveIdenticalUnderlyingType(dst.Elem().common(), src.Elem().common(), false) {
3482 return cvtDirect
3483 }
3484
3485 if implements(dst, src) {
3486 if src.Kind() == Interface {
3487 return cvtI2I
3488 }
3489 return cvtT2I
3490 }
3491
3492 return nil
3493 }
3494
3495 // makeInt returns a Value of type t equal to bits (possibly truncated),
3496 // where t is a signed or unsigned int type.
3497 func makeInt(f flag, bits uint64, t Type) Value {
3498 typ := t.common()
3499 ptr := unsafe_New(typ)
3500 switch typ.size {
3501 case 1:
3502 *(*uint8)(ptr) = uint8(bits)
3503 case 2:
3504 *(*uint16)(ptr) = uint16(bits)
3505 case 4:
3506 *(*uint32)(ptr) = uint32(bits)
3507 case 8:
3508 *(*uint64)(ptr) = bits
3509 }
3510 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3511 }
3512
3513 // makeFloat returns a Value of type t equal to v (possibly truncated to float32),
3514 // where t is a float32 or float64 type.
3515 func makeFloat(f flag, v float64, t Type) Value {
3516 typ := t.common()
3517 ptr := unsafe_New(typ)
3518 switch typ.size {
3519 case 4:
3520 *(*float32)(ptr) = float32(v)
3521 case 8:
3522 *(*float64)(ptr) = v
3523 }
3524 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3525 }
3526
3527 // makeFloat32 returns a Value of type t equal to v, where t is a float32 type.
3528 func makeFloat32(f flag, v float32, t Type) Value {
3529 typ := t.common()
3530 ptr := unsafe_New(typ)
3531 *(*float32)(ptr) = v
3532 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3533 }
3534
3535 // makeComplex returns a Value of type t equal to v (possibly truncated to complex64),
3536 // where t is a complex64 or complex128 type.
3537 func makeComplex(f flag, v complex128, t Type) Value {
3538 typ := t.common()
3539 ptr := unsafe_New(typ)
3540 switch typ.size {
3541 case 8:
3542 *(*complex64)(ptr) = complex64(v)
3543 case 16:
3544 *(*complex128)(ptr) = v
3545 }
3546 return Value{typ, ptr, f | flagIndir | flag(typ.Kind())}
3547 }
3548
3549 func makeString(f flag, v string, t Type) Value {
3550 ret := New(t).Elem()
3551 ret.SetString(v)
3552 ret.flag = ret.flag&^flagAddr | f
3553 return ret
3554 }
3555
3556 func makeBytes(f flag, v []byte, t Type) Value {
3557 ret := New(t).Elem()
3558 ret.SetBytes(v)
3559 ret.flag = ret.flag&^flagAddr | f
3560 return ret
3561 }
3562
3563 func makeRunes(f flag, v []rune, t Type) Value {
3564 ret := New(t).Elem()
3565 ret.setRunes(v)
3566 ret.flag = ret.flag&^flagAddr | f
3567 return ret
3568 }
3569
3570 // These conversion functions are returned by convertOp
3571 // for classes of conversions. For example, the first function, cvtInt,
3572 // takes any value v of signed int type and returns the value converted
3573 // to type t, where t is any signed or unsigned int type.
3574
3575 // convertOp: intXX -> [u]intXX
3576 func cvtInt(v Value, t Type) Value {
3577 return makeInt(v.flag.ro(), uint64(v.Int()), t)
3578 }
3579
3580
3581 func cvtUint(v Value, t Type) Value {
3582 return makeInt(v.flag.ro(), v.Uint(), t)
3583 }
3584
3585
3586 func cvtFloatInt(v Value, t Type) Value {
3587 return makeInt(v.flag.ro(), uint64(int64(v.Float())), t)
3588 }
3589
3590
3591 func cvtFloatUint(v Value, t Type) Value {
3592 return makeInt(v.flag.ro(), uint64(v.Float()), t)
3593 }
3594
3595
3596 func cvtIntFloat(v Value, t Type) Value {
3597 return makeFloat(v.flag.ro(), float64(v.Int()), t)
3598 }
3599
3600
3601 func cvtUintFloat(v Value, t Type) Value {
3602 return makeFloat(v.flag.ro(), float64(v.Uint()), t)
3603 }
3604
3605
3606 func cvtFloat(v Value, t Type) Value {
3607 if v.Type().Kind() == Float32 && t.Kind() == Float32 {
3608
3609
3610
3611 return makeFloat32(v.flag.ro(), *(*float32)(v.ptr), t)
3612 }
3613 return makeFloat(v.flag.ro(), v.Float(), t)
3614 }
3615
3616
3617 func cvtComplex(v Value, t Type) Value {
3618 return makeComplex(v.flag.ro(), v.Complex(), t)
3619 }
3620
3621
3622 func cvtIntString(v Value, t Type) Value {
3623 s := "\uFFFD"
3624 if x := v.Int(); int64(rune(x)) == x {
3625 s = string(rune(x))
3626 }
3627 return makeString(v.flag.ro(), s, t)
3628 }
3629
3630
3631 func cvtUintString(v Value, t Type) Value {
3632 s := "\uFFFD"
3633 if x := v.Uint(); uint64(rune(x)) == x {
3634 s = string(rune(x))
3635 }
3636 return makeString(v.flag.ro(), s, t)
3637 }
3638
3639
3640 func cvtBytesString(v Value, t Type) Value {
3641 return makeString(v.flag.ro(), string(v.Bytes()), t)
3642 }
3643
3644
3645 func cvtStringBytes(v Value, t Type) Value {
3646 return makeBytes(v.flag.ro(), []byte(v.String()), t)
3647 }
3648
3649
3650 func cvtRunesString(v Value, t Type) Value {
3651 return makeString(v.flag.ro(), string(v.runes()), t)
3652 }
3653
3654
3655 func cvtStringRunes(v Value, t Type) Value {
3656 return makeRunes(v.flag.ro(), []rune(v.String()), t)
3657 }
3658
3659
3660 func cvtSliceArrayPtr(v Value, t Type) Value {
3661 n := t.Elem().Len()
3662 if n > v.Len() {
3663 panic("reflect: cannot convert slice with length " + itoa.Itoa(v.Len()) + " to pointer to array with length " + itoa.Itoa(n))
3664 }
3665 h := (*unsafeheader.Slice)(v.ptr)
3666 return Value{t.common(), h.Data, v.flag&^(flagIndir|flagAddr|flagKindMask) | flag(Pointer)}
3667 }
3668
3669
3670 func cvtSliceArray(v Value, t Type) Value {
3671 n := t.Len()
3672 if n > v.Len() {
3673 panic("reflect: cannot convert slice with length " + itoa.Itoa(v.Len()) + " to array with length " + itoa.Itoa(n))
3674 }
3675 h := (*unsafeheader.Slice)(v.ptr)
3676 typ := t.common()
3677 ptr := h.Data
3678 c := unsafe_New(typ)
3679 typedmemmove(typ, c, ptr)
3680 ptr = c
3681
3682 return Value{typ, ptr, v.flag&^(flagAddr|flagKindMask) | flag(Array)}
3683 }
3684
3685
3686 func cvtDirect(v Value, typ Type) Value {
3687 f := v.flag
3688 t := typ.common()
3689 ptr := v.ptr
3690 if f&flagAddr != 0 {
3691
3692 c := unsafe_New(t)
3693 typedmemmove(t, c, ptr)
3694 ptr = c
3695 f &^= flagAddr
3696 }
3697 return Value{t, ptr, v.flag.ro() | f}
3698 }
3699
3700
3701 func cvtT2I(v Value, typ Type) Value {
3702 target := unsafe_New(typ.common())
3703 x := valueInterface(v, false)
3704 if typ.NumMethod() == 0 {
3705 *(*any)(target) = x
3706 } else {
3707 ifaceE2I(typ.(*rtype), x, target)
3708 }
3709 return Value{typ.common(), target, v.flag.ro() | flagIndir | flag(Interface)}
3710 }
3711
3712
3713 func cvtI2I(v Value, typ Type) Value {
3714 if v.IsNil() {
3715 ret := Zero(typ)
3716 ret.flag |= v.flag.ro()
3717 return ret
3718 }
3719 return cvtT2I(v.Elem(), typ)
3720 }
3721
3722 // implemented in ../runtime
3723 func chancap(ch unsafe.Pointer) int
3724 func chanclose(ch unsafe.Pointer)
3725 func chanlen(ch unsafe.Pointer) int
3726
3727
3728
3729
3730
3731
3732
3733
3734
3735
3736 func chanrecv(ch unsafe.Pointer, nb bool, val unsafe.Pointer) (selected, received bool)
3737
3738
3739 func chansend(ch unsafe.Pointer, val unsafe.Pointer, nb bool) bool
3740
3741 func makechan(typ *rtype, size int) (ch unsafe.Pointer)
3742 func makemap(t *rtype, cap int) (m unsafe.Pointer)
3743
3744
3745 func mapaccess(t *rtype, m unsafe.Pointer, key unsafe.Pointer) (val unsafe.Pointer)
3746
3747
3748 func mapaccess_faststr(t *rtype, m unsafe.Pointer, key string) (val unsafe.Pointer)
3749
3750
3751 func mapassign(t *rtype, m unsafe.Pointer, key, val unsafe.Pointer)
3752
3753
3754 func mapassign_faststr(t *rtype, m unsafe.Pointer, key string, val unsafe.Pointer)
3755
3756
3757 func mapdelete(t *rtype, m unsafe.Pointer, key unsafe.Pointer)
3758
3759
3760 func mapdelete_faststr(t *rtype, m unsafe.Pointer, key string)
3761
3762
3763 func mapiterinit(t *rtype, m unsafe.Pointer, it *hiter)
3764
3765
3766 func mapiterkey(it *hiter) (key unsafe.Pointer)
3767
3768
3769 func mapiterelem(it *hiter) (elem unsafe.Pointer)
3770
3771
3772 func mapiternext(it *hiter)
3773
3774
3775 func maplen(m unsafe.Pointer) int
3776
3777
3778
3779
3780
3781
3782
3783
3784
3785
3786
3787
3788
3789
3790
3791
3792
3793
3794
3795
3796
3797
3798
3799
3800
3801
3802
3803 func call(stackArgsType *rtype, f, stackArgs unsafe.Pointer, stackArgsSize, stackRetOffset, frameSize uint32, regArgs *abi.RegArgs)
3804
3805 func ifaceE2I(t *rtype, src any, dst unsafe.Pointer)
3806
3807
3808
3809
3810 func memmove(dst, src unsafe.Pointer, size uintptr)
3811
3812
3813
3814
3815 func typedmemmove(t *rtype, dst, src unsafe.Pointer)
3816
3817
3818
3819
3820
3821 func typedmemmovepartial(t *rtype, dst, src unsafe.Pointer, off, size uintptr)
3822
3823
3824
3825
3826 func typedmemclr(t *rtype, ptr unsafe.Pointer)
3827
3828
3829
3830
3831
3832 func typedmemclrpartial(t *rtype, ptr unsafe.Pointer, off, size uintptr)
3833
3834
3835
3836
3837
3838 func typedslicecopy(elemType *rtype, dst, src unsafeheader.Slice) int
3839
3840
3841 func typehash(t *rtype, p unsafe.Pointer, h uintptr) uintptr
3842
3843 func verifyNotInHeapPtr(p uintptr) bool
3844
3845
3846 func growslice(t *rtype, old unsafeheader.Slice, num int) unsafeheader.Slice
3847
3848 // Dummy annotation marking that the value x escapes,
3849 // for use in cases where the reflect code is so clever that
3850 // the compiler cannot follow.
3851 func escapes(x any) {
3852 if dummy.b {
3853 dummy.x = x
3854 }
3855 }
3856
3857 var dummy struct {
3858 b bool
3859 x any
3860 }
3861