Trailing whitespaces on Zend

Signed-off-by: Gabriel Caruso <carusogabriel34@gmail.com>
Gabriel Caruso 2018-01-04 02:41:57 -02:00
parent c215b8d147
commit 6687e8db15
5 changed files with 17 additions and 17 deletions

View File

@@ -64,7 +64,7 @@ typedef struct _zend_mm_debug_info {
} zend_mm_debug_info;
# define ZEND_MM_OVERHEAD ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))
#else
# define ZEND_MM_OVERHEAD 0
#endif
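
For context, ZEND_MM_OVERHEAD is the allocator's per-allocation bookkeeping cost: in debug builds every block carries a zend_mm_debug_info header rounded up to the allocator's alignment, otherwise it is zero. A minimal sketch of that accounting, with assumed names and an assumed 8-byte alignment:

#include <stddef.h>

#define MM_ALIGNMENT        8  /* assumed; Zend derives this from the platform */
#define MM_ALIGNED_SIZE(sz) (((sz) + MM_ALIGNMENT - 1) & ~(size_t)(MM_ALIGNMENT - 1))

typedef struct mm_debug_info {      /* stand-in for zend_mm_debug_info */
	size_t      size;
	const char *filename;
	unsigned    lineno;
} mm_debug_info;

#if MM_DEBUG
# define MM_OVERHEAD MM_ALIGNED_SIZE(sizeof(mm_debug_info))
#else
# define MM_OVERHEAD 0
#endif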
@@ -376,7 +376,7 @@ static void apc_init_heap(void)
// Preallocate properly aligned SHM chunks (64MB)
tmp_data.mem = shm_memalign(ZEND_MM_CHUNK_SIZE, ZEND_MM_CHUNK_SIZE * 32);
// Initialize temporary storage data
tmp_data.free_pages = 0;
@@ -389,7 +389,7 @@ static void apc_init_heap(void)
zend_hash_init(apc_ht, 64, NULL, ZVAL_PTR_DTOR, 0);
zend_mm_set_heap(old_heap);
}
*/
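
The commented-out apc_init_heap() example above preallocates chunk-aligned shared memory before swapping heaps; ZEND_MM_CHUNK_SIZE is 2MB, so 32 chunks give the 64MB mentioned in the comment. A portable sketch of that kind of aligned allocation, using posix_memalign in place of the shm_memalign() helper:

#include <stdlib.h>

/* Sketch: allocate `count` chunk-sized, chunk-aligned blocks, similar in
 * spirit to the shm_memalign() call above (but from the process heap). */
static void *alloc_aligned_chunks(size_t chunk_size, size_t count)
{
	void *mem = NULL;
	if (posix_memalign(&mem, chunk_size, chunk_size * count) != 0) {
		return NULL; /* allocation or alignment request failed */
	}
	return mem;
}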
END_EXTERN_C()

View File

@@ -30,7 +30,7 @@ extern ZEND_API zend_class_entry *zend_ce_ClosedGeneratorException;
typedef struct _zend_generator_node zend_generator_node;
typedef struct _zend_generator zend_generator;
/* The concept of `yield from` exposes problems when accessed at different levels of the chain of delegated generators. We need to be able to reference the currently executed Generator in all cases and still be able to access the return values of finished Generators.
* The solution to this problem is a doubly-linked tree in which every Generator involved in the chain maintains a reference. Walking the tree cannot always be avoided, but full walks from leaf to root are only needed when some part of the `yield from` chain is passed to another `yield from`. (The leaf node pointer and the list kept by multi-child nodes must be updated when a leaf gains a child on the direct path from leaf to root.) That case should be fairly rare, and while not totally cheap, it stays possible.
* The root of the tree is the currently executed Generator. The subnodes of the tree (all nodes except the root) are the Generators that do `yield from`. Each node holds a pointer to one leaf descendant node. Each node with multiple children needs a list of all its leaf descendant nodes, each paired with a pointer to the respective child node. (The stack is determined by the leaf node pointers.) Nodes with only one child need no list; a plain pointer to the child node is enough. Leaf nodes additionally store a pointer to the root node.
* That way, when we advance any generator, we only need to look up a leaf node (each of which references a root node). At the root we can see whether the current Generator is finished. If it is not, all is fine and we simply continue. If it is finished, there are a few cases. Either it is a simple node with just one child: then we go down to the child node. Or it has multiple children: then we remove the current leaf node from the list (an unnecessary micro-optimization) and go down to the child node whose reference was paired with the current leaf node; that child has its parent reference removed and becomes the new top node. Or the current node references the Generator we are currently executing: then we can continue from the YIELD_FROM opcode. When a node referenced as root by a leaf node still has a parent, we walk upward until we find a node without a parent: the real root.
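
To make the structure above concrete, a node in such a delegation tree might carry roughly the following fields. This is an illustrative sketch only; the field names and layout are assumptions, not the actual zend_generator_node definition:

#include <stddef.h>

/* Hypothetical sketch of a delegation-tree node as described above. */
typedef struct gen_node gen_node;
struct gen_node {
	gen_node *parent;        /* NULL at the root (the currently executed Generator) */
	gen_node *leaf;          /* pointer to one leaf descendant node */
	gen_node *root;          /* leaf nodes only: cached pointer to the root */
	size_t    num_children;
	gen_node *child;         /* the single child when num_children == 1 */
	/* nodes with multiple children additionally keep a list that pairs
	 * each leaf descendant with the child subtree it lies under */
};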

View File

@@ -337,7 +337,7 @@ static zend_always_inline zval *zend_hash_find_ind(const HashTable *ht, zend_str
zval *zv;
zv = zend_hash_find(ht, key);
return (zv && Z_TYPE_P(zv) == IS_INDIRECT) ?
((Z_TYPE_P(Z_INDIRECT_P(zv)) != IS_UNDEF) ? Z_INDIRECT_P(zv) : NULL) : zv;
}
@@ -367,7 +367,7 @@ static zend_always_inline zval *zend_hash_str_find_ind(const HashTable *ht, cons
zval *zv;
zv = zend_hash_str_find(ht, str, len);
return (zv && Z_TYPE_P(zv) == IS_INDIRECT) ?
((Z_TYPE_P(Z_INDIRECT_P(zv)) != IS_UNDEF) ? Z_INDIRECT_P(zv) : NULL) : zv;
}
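
Both find helpers collapse IS_INDIRECT slots the same way: follow the indirection, and treat an undefined target as "not found". A standalone sketch of that resolution step, reusing the zval macros from the code above:

/* Sketch: resolve a possibly-indirect zval, mapping an undefined
 * target to NULL, exactly as the return expressions above do. */
static zend_always_inline zval *resolve_indirect(zval *zv)
{
	if (zv && Z_TYPE_P(zv) == IS_INDIRECT) {
		zv = Z_INDIRECT_P(zv);
		return (Z_TYPE_P(zv) != IS_UNDEF) ? zv : NULL;
	}
	return zv;
}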
@@ -1088,7 +1088,7 @@ static zend_always_inline zval *_zend_hash_append(HashTable *ht, zend_string *ke
if (!ZSTR_IS_INTERNED(key)) {
ht->u.flags &= ~HASH_FLAG_STATIC_KEYS;
zend_string_addref(key);
zend_string_hash_val(key);
}
p->key = key;
p->h = ZSTR_H(key);
@@ -1110,7 +1110,7 @@ static zend_always_inline zval *_zend_hash_append_ptr(HashTable *ht, zend_string
if (!ZSTR_IS_INTERNED(key)) {
ht->u.flags &= ~HASH_FLAG_STATIC_KEYS;
zend_string_addref(key);
zend_string_hash_val(key);
}
p->key = key;
p->h = ZSTR_H(key);
@@ -1132,7 +1132,7 @@ static zend_always_inline void _zend_hash_append_ind(HashTable *ht, zend_string
if (!ZSTR_IS_INTERNED(key)) {
ht->u.flags &= ~HASH_FLAG_STATIC_KEYS;
zend_string_addref(key);
zend_string_hash_val(key);
}
p->key = key;
p->h = ZSTR_H(key);
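
The three _zend_hash_append* variants above share the same key bookkeeping: interned keys need no refcounting, while ordinary strings clear HASH_FLAG_STATIC_KEYS, gain a reference, and have their hash precomputed so the ZSTR_H(key) read afterwards is valid. A sketch of that shared step (the helper name is hypothetical):

/* Sketch of the shared key-registration step. */
static zend_always_inline void hash_register_key(HashTable *ht, zend_string *key)
{
	if (!ZSTR_IS_INTERNED(key)) {
		ht->u.flags &= ~HASH_FLAG_STATIC_KEYS; /* table now holds refcounted keys */
		zend_string_addref(key);               /* keep the key string alive */
		zend_string_hash_val(key);             /* make sure ZSTR_H(key) is set */
	}
}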

View File

@@ -34,7 +34,7 @@
#endif
/* TODO: check whether this option can be undef'd; that might
improve performance. destroy_freelist()
should be adapted then. */
#define Omit_Private_Memory 1
@@ -140,7 +140,7 @@ typedef unsigned long int uint32_t;
} else if (1 == x) { \
tsrm_mutex_unlock(pow5mult_mutex); \
}
#endif
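
In ZTS builds, zend_strtod guards its shared state (the Bigint freelist and the pow5mult cache) with index-selected mutexes, as the fragment above shows for index 1. A minimal self-contained sketch of the same select-by-index pattern, using POSIX threads in place of TSRM (macro and mutex names assumed):

#include <pthread.h>

static pthread_mutex_t dtoa_mutex     = PTHREAD_MUTEX_INITIALIZER;
static pthread_mutex_t pow5mult_mutex = PTHREAD_MUTEX_INITIALIZER;

/* Sketch of an index-selected lock/unlock pair. */
#define ACQUIRE_DTOA_LOCK(x) do { \
		if (0 == (x))      pthread_mutex_lock(&dtoa_mutex); \
		else if (1 == (x)) pthread_mutex_lock(&pow5mult_mutex); \
	} while (0)

#define FREE_DTOA_LOCK(x) do { \
		if (0 == (x))      pthread_mutex_unlock(&dtoa_mutex); \
		else if (1 == (x)) pthread_mutex_unlock(&pow5mult_mutex); \
	} while (0)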

View File

@@ -957,7 +957,7 @@ ZEND_VM_INLINE_HELPER(zend_binary_assign_op_helper, VAR|UNUSED|THIS|CV, CONST|TM
ZEND_VM_DISPATCH_TO_HELPER(zend_binary_assign_op_dim_helper, binary_op, binary_op);
}
# endif
ZEND_VM_DISPATCH_TO_HELPER(zend_binary_assign_op_obj_helper, binary_op, binary_op);
#endif
}
@@ -1514,7 +1514,7 @@ ZEND_VM_HELPER(zend_fetch_static_prop_helper, CONST|TMPVAR|CV, UNUSED|CONST|VAR,
varname = GET_OP1_ZVAL_PTR_UNDEF(BP_VAR_R);
retval = zend_fetch_static_property_address(varname, OP1_TYPE, opline->op2, OP2_TYPE, type EXECUTE_DATA_CC OPLINE_CC);
if (UNEXPECTED(retval == NULL)) {
if (EG(exception)) {
FREE_OP1();
@@ -2485,7 +2485,7 @@ ZEND_VM_HOT_HANDLER(43, ZEND_JMPZ, CONST|TMPVAR|CV, JMP_ADDR)
zval *val;
val = GET_OP1_ZVAL_PTR_UNDEF(BP_VAR_R);
if (Z_TYPE_INFO_P(val) == IS_TRUE) {
ZEND_VM_SET_NEXT_OPCODE(opline + 1);
ZEND_VM_CONTINUE();
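
ZEND_JMPZ branches when its operand is falsy; the IS_TRUE comparison above is the fast path that merely advances to the next opcode. Stripped of VM machinery, the control flow is roughly this (a simplified sketch; the real handler also special-cases IS_FALSE and otherwise falls back to a full truthiness check):

/* Simplified sketch of ZEND_JMPZ ("jump if zero/false"):
 * returns the index of the next opcode to execute. */
static size_t jmpz_next(size_t cur, size_t target, int cond_is_true)
{
	return cond_is_true ? cur + 1 /* fast path: fall through */
	                    : target; /* condition false: take the jump */
}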
@@ -3219,7 +3219,7 @@ ZEND_VM_HANDLER(113, ZEND_INIT_STATIC_METHOD_CALL, UNUSED|CLASS_FETCH|CONST|VAR,
if (OP1_TYPE == IS_UNUSED) {
/* previous opcode is ZEND_FETCH_CLASS */
if ((opline->op1.num & ZEND_FETCH_CLASS_MASK) == ZEND_FETCH_CLASS_PARENT ||
(opline->op1.num & ZEND_FETCH_CLASS_MASK) == ZEND_FETCH_CLASS_SELF) {
if (Z_TYPE(EX(This)) == IS_OBJECT) {
ce = Z_OBJCE(EX(This));
@@ -3688,7 +3688,7 @@ ZEND_VM_HOT_HANDLER(60, ZEND_DO_FCALL, ANY, ANY, SPEC(RETVAL))
} else {
zend_execute_internal(call, ret);
}
#if ZEND_DEBUG
if (!EG(exception) && call->func) {
ZEND_ASSERT(!(call->func->common.fn_flags & ZEND_ACC_HAS_RETURN_TYPE) ||
@@ -6323,7 +6323,7 @@ ZEND_VM_HANDLER(180, ZEND_ISSET_ISEMPTY_STATIC_PROP, CONST|TMPVAR|CV, UNUSED|CLA
if (OP1_TYPE == IS_CONST && value) {
CACHE_POLYMORPHIC_PTR(Z_CACHE_SLOT_P(RT_CONSTANT(opline, opline->op1)), ce, value);
}
}
if (OP1_TYPE != IS_CONST) {
zend_tmp_string_release(tmp_name);