/* Normalize a (possibly negative) tuple index and bounds-check it.
 * Negative indices count from the end of the tuple (CPython-style);
 * an index that is still out of range after wrapping raises IndexError
 * via krk_runtimeError. Expects `index` and `self` (a KrkTuple *) to be
 * in scope at the expansion site. */
8 #define TUPLE_WRAP_INDEX() \
9 if (index < 0) index += self->values.count; \
10 if (index < 0 || index >= (krk_integer_type)self->values.count) return krk_runtimeError(vm.exceptions->indexError, "tuple index out of range: %zd", (ssize_t)index)
/* Callback handed to krk_unpackIterable() when building a tuple from an
 * arbitrary iterable: appends `count` values to the value array carried
 * in `context`.
 * NOTE(review): several interior lines are elided in this view — the
 * declarations of `positionals` and `old`, the actual reallocation, and
 * the function's closing/return are not visible; confirm against the
 * full file. */
12 static int _tuple_init_callback(
void * context,
const KrkValue * values,
size_t count) {
/* Grow geometrically when values arrive one at a time; otherwise
 * reserve exactly enough capacity for the whole incoming batch. */
16 positionals->
capacity = (count == 1) ? KRK_GROW_CAPACITY(old) : (positionals->
count + count);
/* Append the incoming batch to the end of the array. */
20 for (
size_t i = 0; i < count; ++i) {
21 positionals->
values[positionals->
count++] = values[i];
/* tuple.__new__(iterable=...): construct a new tuple, optionally from a
 * single iterable argument.
 * NOTE(review): lines between METHOD_TAKES_AT_MOST and the copy loop
 * (argument unpacking and allocation of the result) are elided in this
 * view — `self`/`argc`/`argv` at this point presumably refer to the
 * freshly allocated tuple and the collected values rather than the
 * method's raw arguments; confirm against the full file. */
27 KRK_StaticMethod(tuple,__new__) {
28 METHOD_TAKES_AT_MOST(1);
/* Copy the collected values into the new tuple. */
43 for (
size_t i = 0; i < (size_t)argc; ++i) {
44 self->values.values[
self->values.count++] = argv[i];
48 return OBJECT_VAL(
self);
/* Glue so the KRK_Method / CHECK_ARG machinery can treat `tuple` as a
 * type name mapping onto the core TUPLE value representation. */
51 #define IS_tuple(o) IS_TUPLE(o)
52 #define AS_tuple(o) AS_TUPLE(o)
/* Methods below receive their receiver unboxed as `KrkTuple * self`. */
54 #define CURRENT_CTYPE KrkTuple *
55 #define CURRENT_NAME self
/* tuple.__contains__(needle): membership test by linear scan.
 * NOTE(review): the loop body (lines 60-61, presumably an equality
 * check with an early `return BOOLEAN_VAL(1)`) is elided in this view —
 * confirm against the full file. */
57 KRK_Method(tuple,__contains__) {
58 METHOD_TAKES_EXACTLY(1);
59 for (
size_t i = 0; i <
self->values.count; ++i) {
/* Nothing matched. */
62 return BOOLEAN_VAL(0);
/* tuple.__len__(): element count of the backing value array. */
65 KRK_Method(tuple,__len__) {
67 return INTEGER_VAL(self->values.count);
/* tuple.__getitem__(index-or-slice): integer indexing returns a single
 * element; slice indexing builds a new tuple.
 * NOTE(review): large parts of both branches are elided in this view —
 * line 74 presumably applies TUPLE_WRAP_INDEX(), and the slice copy
 * loops around lines 78-106 are not visible; confirm against the full
 * file. */
70 KRK_Method(tuple,__getitem__) {
71 METHOD_TAKES_EXACTLY(1);
72 if (IS_INTEGER(argv[1])) {
73 CHECK_ARG(1,
int,krk_integer_type,index);
75 return self->values.values[index];
76 }
else if (IS_slice(argv[1])) {
/* KRK_SLICER yields start/end/step for the requested slice. */
77 KRK_SLICER(argv[1],self->values.count) {
/* Presumably the step == 1 branch: length is a simple difference. */
82 krk_integer_type len = end - start;
/* Strided branch: walk with `step`, counting elements as we go. */
88 krk_integer_type len = 0;
89 krk_integer_type i = start;
90 while ((step < 0) ? (i > end) : (i < end)) {
/* Neither an int nor a slice. */
107 return TYPE_ERROR(
int or slice, argv[1]);
/* tuple.__eq__(other): elementwise equality. Non-tuple operands yield
 * NotImplemented so the VM can try the reflected operation.
 * NOTE(review): the loop body (lines 117-118, presumably comparing
 * corresponding elements and returning False on a mismatch) is elided
 * in this view — confirm against the full file. */
111 KRK_Method(tuple,__eq__) {
112 METHOD_TAKES_EXACTLY(1);
113 if (!IS_tuple(argv[1]))
return NOTIMPL_VAL();
114 KrkTuple * them = AS_tuple(argv[1]);
/* Tuples of differing length can never be equal. */
115 if (self->values.count != them->
values.
count)
return BOOLEAN_VAL(0);
116 for (
size_t i = 0; i <
self->values.count; ++i) {
/* All elements matched. */
119 return BOOLEAN_VAL(1);
/* Generates tuple.__gt__/__lt__/__ge__/__le__: walk both tuples up to
 * the shorter length; on the first pair that is not same-or-equal,
 * defer to krk_operator_<name> for that pair (bailing out first if the
 * equality check raised). If one tuple is a prefix of the other, the
 * result is the length comparison under `op`. Non-tuple operands yield
 * NotImplemented.
 * NOTE(review): the macro's closing lines (134, 136-137 in the original
 * numbering) are elided in this view — confirm brace balance against
 * the full file. Comments cannot be inserted between the continued
 * lines without also escaping the newline, so the body is left as-is. */
122 #define MAKE_TUPLE_COMPARE(name,op) \
123 KRK_Method(tuple,__ ## name ## __) { \
124 METHOD_TAKES_EXACTLY(1); \
125 if (!IS_tuple(argv[1])) return NOTIMPL_VAL(); \
126 KrkTuple * them = AS_tuple(argv[1]); \
127 size_t lesser = self->values.count < them->values.count ? self->values.count : them->values.count; \
128 for (size_t i = 0; i < lesser; ++i) { \
129 KrkValue a = self->values.values[i]; \
130 KrkValue b = them->values.values[i]; \
131 if (krk_valuesSameOrEqual(a,b)) continue; \
132 if (unlikely(krk_currentThread.flags & KRK_THREAD_HAS_EXCEPTION)) return NONE_VAL(); \
133 return krk_operator_ ## name(a,b); \
135 return BOOLEAN_VAL((self->values.count op them->values.count)); \
/* Instantiate the four ordering comparisons from the template above. */
138 MAKE_TUPLE_COMPARE(gt,>)
139 MAKE_TUPLE_COMPARE(lt,<)
140 MAKE_TUPLE_COMPARE(ge,>=)
141 MAKE_TUPLE_COMPARE(le,<=)
/* tuple.__repr__(): render as "(a, b, c)", with a trailing comma for
 * one-element tuples and "(...)" for self-referential tuples (guarded
 * by the IN_REPR object flag).
 * NOTE(review): several lines are elided in this view — the
 * StringBuilder declaration, various brace closings, and the tail
 * around lines 162-168 (presumably the `_error` label and a
 * krk_discardStringBuilder cleanup) are not visible; confirm. */
143 KRK_Method(tuple,__repr__) {
/* Already being printed further up the stack: short-circuit to avoid
 * infinite recursion on self-referential tuples. */
144 if (((
KrkObj*)
self)->flags & KRK_OBJ_FLAGS_IN_REPR)
return OBJECT_VAL(S(
"(...)"));
145 ((
KrkObj*)
self)->flags |= KRK_OBJ_FLAGS_IN_REPR;
148 pushStringBuilder(&sb,
'(');
150 for (
size_t i = 0; i <
self->values.count; ++i) {
151 if (i) pushStringBuilderStr(&sb,
", ", 2);
/* %R formats the element's own repr; failure jumps to cleanup. */
152 if (!krk_pushStringBuilderFormat(&sb,
"%R", self->values.values[i]))
goto _error;
/* One-element tuples need the trailing comma to read back as tuples. */
155 if (self->values.count == 1) {
156 pushStringBuilder(&sb,
',');
159 pushStringBuilder(&sb,
')');
160 ((
KrkObj*)
self)->flags &= ~(KRK_OBJ_FLAGS_IN_REPR);
161 return finishStringBuilder(&sb);
/* Presumably the _error path: clear IN_REPR before propagating. */
164 ((
KrkObj*)
self)->flags &= ~(KRK_OBJ_FLAGS_IN_REPR);
/* tuple.__add__(other): concatenation; rejects non-tuple operands with
 * a TypeError.
 * NOTE(review): heavily elided in this view — the error call that the
 * message on line 173 belongs to, the result allocation (~174-177), the
 * copy-loop bodies, and the return are not visible; confirm against the
 * full file. */
169 KRK_Method(tuple,__add__) {
170 METHOD_TAKES_EXACTLY(1);
171 if (!IS_tuple(argv[1]))
"can only concatenate tuple (not '%T') to tuple", argv[1]);
175 KrkTuple * other = AS_tuple(argv[1]);
/* Copy our own elements first; `other`'s follow in the elided code. */
178 for (
size_t i = 0; i <
self->values.count; ++i) {
/* Native __init__ for tupleiterator: remembers the tuple to iterate.
 * NOTE(review): interior lines are elided in this view (unboxing of
 * `self`, index initialization, and the return) — confirm. */
197 static KrkValue _tuple_iter_init(
int argc,
const KrkValue argv[],
int hasKw) {
199 self->myTuple = argv[1];
/* GC scan hook for tupleiterator instances.
 * NOTE(review): the body is elided in this view — presumably marks the
 * stored `myTuple` with krk_markValue so the underlying tuple is kept
 * alive while the iterator is; confirm against the full file. */
204 static void _tuple_iter_gcscan(
KrkInstance *
self) {
/* Native __call__ for tupleiterator: returns the next element of the
 * underlying tuple.
 * NOTE(review): the unboxing of `self`/`t`/`i`, the exhaustion-branch
 * body (lines 213-214, presumably returning the iterator itself as the
 * end sentinel), and the index increment are elided in this view. */
208 static KrkValue _tuple_iter_call(
int argc,
const KrkValue argv[],
int hasKw) {
/* Past the end: stop iteration (branch body elided). */
212 if (i >= (krk_integer_type)AS_TUPLE(t)->values.count) {
216 return AS_TUPLE(t)->values.values[i];
/* tuple.__iter__(): builds a tupleiterator instance over this tuple.
 * NOTE(review): the allocation and initialization of `output` (lines
 * 221-224, presumably krk_newInstance + calling the iterator's
 * __init__) are elided in this view. */
220 KRK_Method(tuple,__iter__) {
225 return OBJECT_VAL(output);
/* tuple.__hash__(): order-sensitive hash over the elements, cached on
 * the object once computed (VALID_HASH flag).
 * NOTE(review): several mixing lines (233, 235, 237, 239-240) and the
 * `_unhashable` tail are elided in this view — the exact algorithm and
 * where `m`/`step` are combined into `t` cannot be confirmed here. */
228 KRK_Method(tuple,__hash__) {
/* Fast path: reuse the previously computed hash. */
229 if (self->obj.flags & KRK_OBJ_FLAGS_VALID_HASH) {
230 return INTEGER_VAL(self->obj.hash);
/* Seed the accumulator with the element count. */
232 uint32_t t =
self->values.count;
234 for (
size_t i = 0; i < (size_t)self->values.count; ++i) {
/* An element that cannot be hashed makes the whole tuple unhashable. */
236 if (
krk_hashValue(self->values.values[i], &step))
goto _unhashable;
238 m += 2 * (
self->values.count - i) + 82520;
/* Cache the result for subsequent calls. */
241 self->obj.flags |= KRK_OBJ_FLAGS_VALID_HASH;
242 return INTEGER_VAL(self->obj.hash);
/* tuple.__mul__(count): repetition. Negative counts clamp to zero
 * (empty result); non-integer operands yield NotImplemented.
 * NOTE(review): the result allocation (~254-255), the inner copy-loop
 * body, and the return are elided in this view — confirm. */
247 KRK_Method(tuple,__mul__) {
248 METHOD_TAKES_EXACTLY(1);
250 if (!IS_INTEGER(argv[1]))
return NOTIMPL_VAL();
252 ssize_t count = AS_INTEGER(argv[1]);
253 if (count < 0) count = 0;
/* Append `count` full copies of our own elements. */
256 for (ssize_t i = 0; i < count; ++i) {
257 for (
size_t j = 0; j <
self->values.count; ++j) {
/* Registers the `tuple` base class and its `tupleiterator` helper with
 * the VM and binds all of the native methods defined above.
 * NOTE(review): a few lines are elided in this view (around 269,
 * 284-285, and past 290 — presumably krk_finalizeClass calls for both
 * classes); confirm against the full file. */
266 void _createAndBind_tupleClass(
void) {
267 KrkClass * tuple = ADD_BASE_CLASS(
vm.baseClasses->tupleClass,
"tuple",
vm.baseClasses->objectClass);
/* Marked NO_INHERIT — presumably blocks user subclassing of tuple. */
268 tuple->
obj.
flags |= KRK_OBJ_FLAGS_NO_INHERIT;
270 BIND_STATICMETHOD(tuple,__new__);
271 BIND_METHOD(tuple,__repr__);
272 BIND_METHOD(tuple,__getitem__);
273 BIND_METHOD(tuple,__len__);
274 BIND_METHOD(tuple,__contains__);
275 BIND_METHOD(tuple,__iter__);
276 BIND_METHOD(tuple,__eq__);
277 BIND_METHOD(tuple,__lt__);
278 BIND_METHOD(tuple,__gt__);
279 BIND_METHOD(tuple,__le__);
280 BIND_METHOD(tuple,__ge__);
281 BIND_METHOD(tuple,__hash__);
282 BIND_METHOD(tuple,__add__);
283 BIND_METHOD(tuple,__mul__);
/* The iterator type: instances sized for struct TupleIter, scanned by
 * the GC hook above, with native __init__/__call__ implementations. */
286 ADD_BASE_CLASS(
vm.baseClasses->tupleiteratorClass,
"tupleiterator",
vm.baseClasses->objectClass);
287 vm.baseClasses->tupleiteratorClass->allocSize =
sizeof(
struct TupleIter);
288 vm.baseClasses->tupleiteratorClass->_ongcscan = _tuple_iter_gcscan;
289 krk_defineNative(&
vm.baseClasses->tupleiteratorClass->methods,
"__init__", _tuple_iter_init);
290 krk_defineNative(&
vm.baseClasses->tupleiteratorClass->methods,
"__call__", _tuple_iter_call);
KrkValue krk_runtimeError(KrkClass *type, const char *fmt,...)
Produce and raise an exception with a formatted message.
Functions for dealing with garbage collection and memory allocation.
void krk_markValue(KrkValue value)
During a GC scan cycle, mark a value as used.
size_t allocSize
Size to allocate when creating instances of this class.
void krk_finalizeClass(KrkClass *_class)
Finalize a class by collecting pointers to core methods.
KrkInstance * krk_newInstance(KrkClass *_class)
Create a new instance of the given class.
The most basic object type.
uint16_t flags
General object flags, mostly related to garbage collection.
KrkNative * krk_defineNative(KrkTable *table, const char *name, NativeFn function)
Attach a native C function to an attribute table.
Immutable sequence of arbitrary values.
KrkValueArray values
Stores the length, capacity, and actual values of the tuple.
KrkValue krk_tuple_of(int argc, const KrkValue argv[], int hasKw)
Create a tuple object.
KrkTuple * krk_newTuple(size_t length)
Create a new tuple.
Flexible vector of stack references.
Stack reference or primitive value.
int krk_valuesSameOrEqual(KrkValue a, KrkValue b)
Compare two values by identity, then by equality.
int krk_hashValue(KrkValue value, uint32_t *hashOut)
Calculate the hash for a value.
Inline flexible string array.
Iterator over the values in a tuple.
Utilities for creating native bindings.
KrkValue krk_discardStringBuilder(struct StringBuilder *sb)
Discard the contents of a string builder.
int krk_unpackIterable(KrkValue iterable, void *context, int callback(void *, const KrkValue *, size_t))
Unpack an iterable.
Definitions for primitive stack references.
Core API for the bytecode virtual machine.
krk_threadLocal KrkThreadState krk_currentThread
Thread-local VM state.
#define vm
Convenience macro for namespacing.
KrkValue krk_callNativeOnStack(size_t argCount, const KrkValue *stackArgs, int hasKw, NativeFn native)
Call a native function using a reference to stack arguments safely.
KrkValue krk_pop(void)
Pop the top of the stack.
void krk_push(KrkValue value)
Push a stack value.
KrkValue krk_peek(int distance)
Peek down from the top of the stack.