mirror of https://github.com/ruby/ruby.git

style: switch statements indent

Case labels get half an indent and the opening brace is on the same line
as "switch".
Alan Wu, 2021-09-29 15:38:57 -04:00
commit ec4998bd69 (parent a10cf74e5c)
3 changed files with 160 additions and 172 deletions
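For illustration, the change looks like this (a made-up sketch; `x` and `do_zero` are hypothetical, and the pre-existing code actually mixed a few variants of the old style):

    // Before: opening brace on its own line, case labels a full level in
    switch (x)
    {
        case 0:
        do_zero();
        break;
    }

    // After: brace on the "switch" line, labels get half an indent,
    // bodies a full one
    switch (x) {
      case 0:
        do_zero();
        break;
    }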

diff --git a/yjit_asm.c b/yjit_asm.c

@@ -62,19 +62,19 @@ x86opnd_t mem_opnd_sib(uint32_t num_bits, x86opnd_t base_reg, x86opnd_t index_re
 {
     uint8_t scale_exp;
     switch (scale) {
-        case 8:
+      case 8:
         scale_exp = 3;
         break;
-        case 4:
+      case 4:
         scale_exp = 2;
         break;
-        case 2:
+      case 2:
         scale_exp = 1;
         break;
-        case 1:
+      case 1:
         scale_exp = 0;
         break;
-        default:
+      default:
         assert(false && "scale not one of 1,2,4,8");
         break;
     }
@@ -335,13 +335,12 @@ void cb_write_int(codeblock_t *cb, uint64_t val, uint32_t num_bits)
     assert (num_bits % 8 == 0);
 
     // Switch on the number of bits
-    switch (num_bits)
-    {
-        case 8:
+    switch (num_bits) {
+      case 8:
         cb_write_byte(cb, (uint8_t)val);
         break;
 
-        case 16:
+      case 16:
         cb_write_bytes(
             cb,
             2,
@@ -350,7 +349,7 @@ void cb_write_int(codeblock_t *cb, uint64_t val, uint32_t num_bits)
         );
         break;
 
-        case 32:
+      case 32:
         cb_write_bytes(
             cb,
             4,
@@ -361,7 +360,7 @@ void cb_write_int(codeblock_t *cb, uint64_t val, uint32_t num_bits)
         );
         break;
 
-        default:
+      default:
         {
             // Compute the size in bytes
             uint32_t num_bytes = num_bits / 8;
@@ -1439,57 +1438,56 @@ void neg(codeblock_t *cb, x86opnd_t opnd)
 // nop - Noop, one or multiple bytes long
 void nop(codeblock_t *cb, uint32_t length)
 {
-    switch (length)
-    {
-        case 0:
+    switch (length) {
+      case 0:
         break;
 
-        case 1:
+      case 1:
         //cb.writeASM("nop1");
         cb_write_byte(cb, 0x90);
         break;
 
-        case 2:
+      case 2:
         //cb.writeASM("nop2");
         cb_write_bytes(cb, 2, 0x66,0x90);
         break;
 
-        case 3:
+      case 3:
         //cb.writeASM("nop3");
         cb_write_bytes(cb, 3, 0x0F,0x1F,0x00);
         break;
 
-        case 4:
+      case 4:
         //cb.writeASM("nop4");
         cb_write_bytes(cb, 4, 0x0F,0x1F,0x40,0x00);
         break;
 
-        case 5:
+      case 5:
         //cb.writeASM("nop5");
         cb_write_bytes(cb, 5, 0x0F,0x1F,0x44,0x00,0x00);
         break;
 
-        case 6:
+      case 6:
         //cb.writeASM("nop6");
         cb_write_bytes(cb, 6, 0x66,0x0F,0x1F,0x44,0x00,0x00);
         break;
 
-        case 7:
+      case 7:
         //cb.writeASM("nop7");
         cb_write_bytes(cb, 7, 0x0F,0x1F,0x80,0x00,0x00,0x00,0x00);
         break;
 
-        case 8:
+      case 8:
         //cb.writeASM("nop8");
         cb_write_bytes(cb, 8, 0x0F,0x1F,0x84,0x00,0x00,0x00,0x00,0x00);
         break;
 
-        case 9:
+      case 9:
         //cb.writeASM("nop9");
         cb_write_bytes(cb, 9, 0x66,0x0F,0x1F,0x84,0x00,0x00,0x00,0x00,0x00);
         break;
 
-        default:
+      default:
         {
             uint32_t written = 0;
             while (written + 9 <= length)
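The hunk display ends inside nop()'s default branch. As a reconstruction for illustration (not copied from the file), the visible `while (written + 9 <= length)` loop would pad long gaps by repeating the 9-byte NOP from `case 9`, leaving the shorter encodings to finish the remainder:

    // Sketch only: plausible continuation of the default branch above.
    uint32_t written = 0;
    while (written + 9 <= length) {
        // same bytes as "case 9"
        cb_write_bytes(cb, 9, 0x66,0x0F,0x1F,0x84,0x00,0x00,0x00,0x00,0x00);
        written += 9;
    }
    if (written < length)
        nop(cb, length - written); // one shorter NOP covers the rest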

diff --git a/yjit_codegen.c b/yjit_codegen.c

@@ -1397,14 +1397,13 @@ guard_self_is_heap(codeblock_t *cb, x86opnd_t self_opnd, uint8_t *side_exit, ctx
 static void
 gen_jnz_to_target0(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
 {
-    switch (shape)
-    {
-        case SHAPE_NEXT0:
-        case SHAPE_NEXT1:
+    switch (shape) {
+      case SHAPE_NEXT0:
+      case SHAPE_NEXT1:
         RUBY_ASSERT(false);
         break;
 
-        case SHAPE_DEFAULT:
+      case SHAPE_DEFAULT:
         jnz_ptr(cb, target0);
         break;
     }
@@ -1413,14 +1412,13 @@ gen_jnz_to_target0(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t
 static void
 gen_jz_to_target0(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
 {
-    switch (shape)
-    {
-        case SHAPE_NEXT0:
-        case SHAPE_NEXT1:
+    switch (shape) {
+      case SHAPE_NEXT0:
+      case SHAPE_NEXT1:
         RUBY_ASSERT(false);
         break;
 
-        case SHAPE_DEFAULT:
+      case SHAPE_DEFAULT:
         jz_ptr(cb, target0);
         break;
     }
@@ -1429,14 +1427,13 @@ gen_jz_to_target0(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t s
 static void
 gen_jbe_to_target0(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
 {
-    switch (shape)
-    {
-        case SHAPE_NEXT0:
-        case SHAPE_NEXT1:
+    switch (shape) {
+      case SHAPE_NEXT0:
+      case SHAPE_NEXT1:
         RUBY_ASSERT(false);
         break;
 
-        case SHAPE_DEFAULT:
+      case SHAPE_DEFAULT:
         jbe_ptr(cb, target0);
         break;
     }
@@ -1459,21 +1456,21 @@ jit_chain_guard(enum jcc_kinds jcc, jitstate_t *jit, const ctx_t *ctx, uint8_t d
     branchgen_fn target0_gen_fn;
 
     switch (jcc) {
-        case JCC_JNE:
-        case JCC_JNZ:
-            target0_gen_fn = gen_jnz_to_target0;
-            break;
-        case JCC_JZ:
-        case JCC_JE:
-            target0_gen_fn = gen_jz_to_target0;
-            break;
-        case JCC_JBE:
-        case JCC_JNA:
-            target0_gen_fn = gen_jbe_to_target0;
-            break;
-        default:
-            RUBY_ASSERT(false && "unimplemented jump kind");
-            break;
+      case JCC_JNE:
+      case JCC_JNZ:
+        target0_gen_fn = gen_jnz_to_target0;
+        break;
+      case JCC_JZ:
+      case JCC_JE:
+        target0_gen_fn = gen_jz_to_target0;
+        break;
+      case JCC_JBE:
+      case JCC_JNA:
+        target0_gen_fn = gen_jbe_to_target0;
+        break;
+      default:
+        RUBY_ASSERT(false && "unimplemented jump kind");
+        break;
     };
 
     if (ctx->chain_depth < depth_limit) {
@@ -2598,17 +2595,16 @@ gen_opt_case_dispatch(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
 void
 gen_branchif_branch(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
 {
-    switch (shape)
-    {
-        case SHAPE_NEXT0:
+    switch (shape) {
+      case SHAPE_NEXT0:
         jz_ptr(cb, target1);
         break;
 
-        case SHAPE_NEXT1:
+      case SHAPE_NEXT1:
         jnz_ptr(cb, target0);
         break;
 
-        case SHAPE_DEFAULT:
+      case SHAPE_DEFAULT:
         jnz_ptr(cb, target0);
         jmp_ptr(cb, target1);
         break;
@@ -2655,17 +2651,16 @@ gen_branchif(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
 void
 gen_branchunless_branch(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
 {
-    switch (shape)
-    {
-        case SHAPE_NEXT0:
+    switch (shape) {
+      case SHAPE_NEXT0:
         jnz_ptr(cb, target1);
         break;
 
-        case SHAPE_NEXT1:
+      case SHAPE_NEXT1:
         jz_ptr(cb, target0);
         break;
 
-        case SHAPE_DEFAULT:
+      case SHAPE_DEFAULT:
         jz_ptr(cb, target0);
         jmp_ptr(cb, target1);
         break;
@@ -2712,17 +2707,16 @@ gen_branchunless(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
 void
 gen_branchnil_branch(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
 {
-    switch (shape)
-    {
-        case SHAPE_NEXT0:
+    switch (shape) {
+      case SHAPE_NEXT0:
         jne_ptr(cb, target1);
         break;
 
-        case SHAPE_NEXT1:
+      case SHAPE_NEXT1:
         je_ptr(cb, target0);
         break;
 
-        case SHAPE_DEFAULT:
+      case SHAPE_DEFAULT:
         je_ptr(cb, target0);
         jmp_ptr(cb, target1);
         break;
@@ -3303,14 +3297,13 @@ gen_send_cfunc(jitstate_t *jit, ctx_t *ctx, const struct rb_callinfo *ci, const
 static void
 gen_return_branch(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
 {
-    switch (shape)
-    {
-        case SHAPE_NEXT0:
-        case SHAPE_NEXT1:
+    switch (shape) {
+      case SHAPE_NEXT0:
+      case SHAPE_NEXT1:
         RUBY_ASSERT(false);
         break;
 
-        case SHAPE_DEFAULT:
+      case SHAPE_DEFAULT:
         mov(cb, REG0, const_ptr_opnd(target0));
         mov(cb, member_opnd(REG_CFP, rb_control_frame_t, jit_return), REG0);
         break;
@@ -3657,20 +3650,20 @@ gen_send_general(jitstate_t *jit, ctx_t *ctx, struct rb_call_data *cd, rb_iseq_t
     }
 
     switch (METHOD_ENTRY_VISI(cme)) {
-    case METHOD_VISI_PUBLIC:
+      case METHOD_VISI_PUBLIC:
         // Can always call public methods
         break;
-    case METHOD_VISI_PRIVATE:
+      case METHOD_VISI_PRIVATE:
         if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
             // Can only call private methods with FCALL callsites.
             // (at the moment they are callsites without a receiver or an explicit `self` receiver)
             return YJIT_CANT_COMPILE;
         }
         break;
-    case METHOD_VISI_PROTECTED:
+      case METHOD_VISI_PROTECTED:
         jit_protected_callee_ancestry_guard(jit, cb, cme, side_exit);
         break;
-    case METHOD_VISI_UNDEF:
+      case METHOD_VISI_UNDEF:
         RUBY_ASSERT(false && "cmes should always have a visibility");
         break;
     }
@@ -3683,11 +3676,11 @@ gen_send_general(jitstate_t *jit, ctx_t *ctx, struct rb_call_data *cd, rb_iseq_t
     while (true) {
         // switch on the method type
        switch (cme->def->type) {
-        case VM_METHOD_TYPE_ISEQ:
+          case VM_METHOD_TYPE_ISEQ:
             return gen_send_iseq(jit, ctx, ci, cme, block, argc);
-        case VM_METHOD_TYPE_CFUNC:
+          case VM_METHOD_TYPE_CFUNC:
             return gen_send_cfunc(jit, ctx, ci, cme, block, argc, &comptime_recv_klass);
-        case VM_METHOD_TYPE_IVAR:
+          case VM_METHOD_TYPE_IVAR:
             if (argc != 0) {
                 // Argument count mismatch. Getters take no arguments.
                 GEN_COUNTER_INC(cb, send_getter_arity);
@@ -3699,7 +3692,7 @@ gen_send_general(jitstate_t *jit, ctx_t *ctx, struct rb_call_data *cd, rb_iseq_t
                 ID ivar_name = cme->def->body.attr.id;
                 return gen_get_ivar(jit, ctx, SEND_MAX_DEPTH, comptime_recv, ivar_name, recv_opnd, side_exit);
             }
-        case VM_METHOD_TYPE_ATTRSET:
+          case VM_METHOD_TYPE_ATTRSET:
             if (argc != 1 || !RB_TYPE_P(comptime_recv, T_OBJECT)) {
                 GEN_COUNTER_INC(cb, send_ivar_set_method);
                 return YJIT_CANT_COMPILE;
@@ -3707,33 +3700,33 @@ gen_send_general(jitstate_t *jit, ctx_t *ctx, struct rb_call_data *cd, rb_iseq_t
                 ID ivar_name = cme->def->body.attr.id;
                 return gen_set_ivar(jit, ctx, comptime_recv, comptime_recv_klass, ivar_name);
             }
-        case VM_METHOD_TYPE_BMETHOD:
+          case VM_METHOD_TYPE_BMETHOD:
             GEN_COUNTER_INC(cb, send_bmethod);
             return YJIT_CANT_COMPILE;
-        case VM_METHOD_TYPE_ZSUPER:
+          case VM_METHOD_TYPE_ZSUPER:
             GEN_COUNTER_INC(cb, send_zsuper_method);
             return YJIT_CANT_COMPILE;
-        case VM_METHOD_TYPE_ALIAS: {
+          case VM_METHOD_TYPE_ALIAS: {
             // Retrieve the aliased method and re-enter the switch
             cme = rb_aliased_callable_method_entry(cme);
             continue;
-        }
-        case VM_METHOD_TYPE_UNDEF:
+          }
+          case VM_METHOD_TYPE_UNDEF:
             GEN_COUNTER_INC(cb, send_undef_method);
             return YJIT_CANT_COMPILE;
-        case VM_METHOD_TYPE_NOTIMPLEMENTED:
+          case VM_METHOD_TYPE_NOTIMPLEMENTED:
             GEN_COUNTER_INC(cb, send_not_implemented_method);
             return YJIT_CANT_COMPILE;
-        case VM_METHOD_TYPE_OPTIMIZED:
+          case VM_METHOD_TYPE_OPTIMIZED:
             GEN_COUNTER_INC(cb, send_optimized_method);
             return YJIT_CANT_COMPILE;
-        case VM_METHOD_TYPE_MISSING:
+          case VM_METHOD_TYPE_MISSING:
             GEN_COUNTER_INC(cb, send_missing_method);
             return YJIT_CANT_COMPILE;
-        case VM_METHOD_TYPE_REFINED:
+          case VM_METHOD_TYPE_REFINED:
             GEN_COUNTER_INC(cb, send_refined_method);
             return YJIT_CANT_COMPILE;
-        // no default case so compiler issues a warning if this is not exhaustive
+          // no default case so compiler issues a warning if this is not exhaustive
         }
 
     // Unreachable
@@ -3831,12 +3824,12 @@ gen_invokesuper(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
 
     // Check that we'll be able to write this method dispatch before generating checks
     switch (cme->def->type) {
-        case VM_METHOD_TYPE_ISEQ:
-        case VM_METHOD_TYPE_CFUNC:
-            break;
-        default:
-            // others unimplemented
-            return YJIT_CANT_COMPILE;
+      case VM_METHOD_TYPE_ISEQ:
+      case VM_METHOD_TYPE_CFUNC:
+        break;
+      default:
+        // others unimplemented
+        return YJIT_CANT_COMPILE;
     }
 
     // Guard that the receiver has the same class as the one from compile time
@@ -3881,12 +3874,12 @@ gen_invokesuper(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
     ctx_clear_local_types(ctx);
 
     switch (cme->def->type) {
-        case VM_METHOD_TYPE_ISEQ:
-            return gen_send_iseq(jit, ctx, ci, cme, block, argc);
-        case VM_METHOD_TYPE_CFUNC:
-            return gen_send_cfunc(jit, ctx, ci, cme, block, argc, NULL);
-        default:
-            break;
+      case VM_METHOD_TYPE_ISEQ:
+        return gen_send_iseq(jit, ctx, ci, cme, block, argc);
+      case VM_METHOD_TYPE_CFUNC:
+        return gen_send_cfunc(jit, ctx, ci, cme, block, argc, NULL);
+      default:
+        break;
     }
 
     RUBY_ASSERT_ALWAYS(false);
@@ -4054,24 +4047,24 @@ gen_getspecial(jitstate_t *jit, ctx_t *ctx, codeblock_t *cb)
         mov(cb, C_ARG_REGS[0], RAX);
 
         switch (type >> 1) {
-            case '&':
-                ADD_COMMENT(cb, "rb_reg_last_match");
-                call_ptr(cb, REG0, (void *)rb_reg_last_match);
-                break;
-            case '`':
-                ADD_COMMENT(cb, "rb_reg_match_pre");
-                call_ptr(cb, REG0, (void *)rb_reg_match_pre);
-                break;
-            case '\'':
-                ADD_COMMENT(cb, "rb_reg_match_post");
-                call_ptr(cb, REG0, (void *)rb_reg_match_post);
-                break;
-            case '+':
-                ADD_COMMENT(cb, "rb_reg_match_last");
-                call_ptr(cb, REG0, (void *)rb_reg_match_last);
-                break;
-            default:
-                rb_bug("invalid back-ref");
+          case '&':
+            ADD_COMMENT(cb, "rb_reg_last_match");
+            call_ptr(cb, REG0, (void *)rb_reg_last_match);
+            break;
+          case '`':
+            ADD_COMMENT(cb, "rb_reg_match_pre");
+            call_ptr(cb, REG0, (void *)rb_reg_match_pre);
+            break;
+          case '\'':
+            ADD_COMMENT(cb, "rb_reg_match_post");
+            call_ptr(cb, REG0, (void *)rb_reg_match_post);
+            break;
+          case '+':
+            ADD_COMMENT(cb, "rb_reg_match_last");
+            call_ptr(cb, REG0, (void *)rb_reg_match_last);
+            break;
+          default:
+            rb_bug("invalid back-ref");
         }
 
         x86opnd_t stack_ret = ctx_stack_push(ctx, TYPE_UNKNOWN);
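The "no default case" comment in the method-type switch above leans on standard compiler behavior: with GCC or Clang, -Wswitch (implied by -Wall) warns about any enumerator that a defaultless switch over an enum fails to handle. A minimal sketch, with a made-up enum and function, written in the new half-indent style:

    enum color { RED, GREEN, BLUE };

    static const char *color_name(enum color c)
    {
        switch (c) {
          case RED:
            return "red";
          case GREEN:
            return "green";
          // no default case, so the compiler reports that BLUE
          // is not handled in this switch
        }
        return "?";
    }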

diff --git a/yjit_core.c b/yjit_core.c

@@ -144,15 +144,14 @@ ctx_get_opnd_type(const ctx_t *ctx, insn_opnd_t opnd)
 
     temp_mapping_t mapping = ctx->temp_mapping[stack_idx];
 
-    switch (mapping.kind)
-    {
-        case TEMP_SELF:
+    switch (mapping.kind) {
+      case TEMP_SELF:
         return ctx->self_type;
 
-        case TEMP_STACK:
+      case TEMP_STACK:
         return ctx->temp_types[ctx->stack_size - 1 - opnd.idx];
 
-        case TEMP_LOCAL:
+      case TEMP_LOCAL:
         RUBY_ASSERT(mapping.idx < MAX_LOCAL_TYPES);
         return ctx->local_types[mapping.idx];
     }
@@ -188,17 +187,16 @@ void ctx_upgrade_opnd_type(ctx_t *ctx, insn_opnd_t opnd, val_type_t type)
 
     temp_mapping_t mapping = ctx->temp_mapping[stack_idx];
 
-    switch (mapping.kind)
-    {
-        case TEMP_SELF:
+    switch (mapping.kind) {
+      case TEMP_SELF:
         UPGRADE_TYPE(ctx->self_type, type);
         break;
 
-        case TEMP_STACK:
+      case TEMP_STACK:
         UPGRADE_TYPE(ctx->temp_types[stack_idx], type);
         break;
 
-        case TEMP_LOCAL:
+      case TEMP_LOCAL:
         RUBY_ASSERT(mapping.idx < MAX_LOCAL_TYPES);
         UPGRADE_TYPE(ctx->local_types[mapping.idx], type);
         break;
@@ -320,15 +318,15 @@ yjit_type_of_value(VALUE val)
         }
     } else {
         switch (BUILTIN_TYPE(val)) {
-            case T_ARRAY:
-                return TYPE_ARRAY;
-            case T_HASH:
-                return TYPE_HASH;
-            case T_STRING:
-                return TYPE_STRING;
-            default:
-                // generic heap object
-                return TYPE_HEAP;
+          case T_ARRAY:
+            return TYPE_ARRAY;
+          case T_HASH:
+            return TYPE_HASH;
+          case T_STRING:
+            return TYPE_STRING;
+          default:
+            // generic heap object
+            return TYPE_HEAP;
         }
     }
 }
@@ -340,32 +338,32 @@ yjit_type_name(val_type_t type)
     RUBY_ASSERT(!(type.is_imm && type.is_heap));
 
     switch (type.type) {
-        case ETYPE_UNKNOWN:
-            if (type.is_imm) {
-                return "unknown immediate";
-            } else if (type.is_heap) {
-                return "unknown heap";
-            } else {
-                return "unknown";
-            }
-        case ETYPE_NIL:
-            return "nil";
-        case ETYPE_TRUE:
-            return "true";
-        case ETYPE_FALSE:
-            return "false";
-        case ETYPE_FIXNUM:
-            return "fixnum";
-        case ETYPE_FLONUM:
-            return "flonum";
-        case ETYPE_ARRAY:
-            return "array";
-        case ETYPE_HASH:
-            return "hash";
-        case ETYPE_SYMBOL:
-            return "symbol";
-        case ETYPE_STRING:
-            return "string";
+      case ETYPE_UNKNOWN:
+        if (type.is_imm) {
+            return "unknown immediate";
+        } else if (type.is_heap) {
+            return "unknown heap";
+        } else {
+            return "unknown";
+        }
+      case ETYPE_NIL:
+        return "nil";
+      case ETYPE_TRUE:
+        return "true";
+      case ETYPE_FALSE:
+        return "false";
+      case ETYPE_FIXNUM:
+        return "fixnum";
+      case ETYPE_FLONUM:
+        return "flonum";
+      case ETYPE_ARRAY:
+        return "array";
+      case ETYPE_HASH:
+        return "hash";
+      case ETYPE_SYMBOL:
+        return "symbol";
+      case ETYPE_STRING:
+        return "string";
     }
 
     UNREACHABLE_RETURN("");
@@ -901,16 +899,15 @@ void gen_branch(
 void
 gen_jump_branch(codeblock_t *cb, uint8_t *target0, uint8_t *target1, uint8_t shape)
 {
-    switch (shape)
-    {
-        case SHAPE_NEXT0:
+    switch (shape) {
+      case SHAPE_NEXT0:
         break;
 
-        case SHAPE_NEXT1:
+      case SHAPE_NEXT1:
         RUBY_ASSERT(false);
         break;
 
-        case SHAPE_DEFAULT:
+      case SHAPE_DEFAULT:
         jmp_ptr(cb, target0);
         break;
     }