/*
 *
 * Copyright 2015 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#include <grpc/support/port_platform.h>

#include "src/core/lib/channel/channel_stack.h"

#include <stdlib.h>
#include <string.h>

#include <grpc/support/alloc.h>
#include <grpc/support/log.h>

#include "src/core/lib/gpr/alloc.h"

grpc_core::TraceFlag grpc_trace_channel(false, "channel");

/* Memory layouts.

   Channel stack is laid out as: {
     grpc_channel_stack stk;
     padding to GPR_MAX_ALIGNMENT
     grpc_channel_element[stk.count];
     per-filter memory, aligned to GPR_MAX_ALIGNMENT
   }

   Call stack is laid out as: {
     grpc_call_stack stk;
     padding to GPR_MAX_ALIGNMENT
     grpc_call_element[stk.count];
     per-filter memory, aligned to GPR_MAX_ALIGNMENT
   } */

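/* Illustrative sizing example (not built; the numbers are hypothetical and
   assume GPR_MAX_ALIGNMENT is 16): for a channel stack with two filters whose
   sizeof_channel_data are 24 and 8 bytes, the allocation is

     round_up(sizeof(grpc_channel_stack), 16)        header
   + round_up(2 * sizeof(grpc_channel_element), 16)  element array
   + round_up(24, 16) + round_up(8, 16)              per-filter data = 32 + 16

   grpc_channel_stack_size() below computes exactly this sum. */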
size_t grpc_channel_stack_size(const grpc_channel_filter** filters,
                               size_t filter_count) {
  /* always need the header, and size for the channel elements */
  size_t size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_channel_stack)) +
                GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filter_count *
                                               sizeof(grpc_channel_element));
  size_t i;

  GPR_ASSERT((GPR_MAX_ALIGNMENT & (GPR_MAX_ALIGNMENT - 1)) == 0 &&
             "GPR_MAX_ALIGNMENT must be a power of two");

  /* add the size for each filter */
  for (i = 0; i < filter_count; i++) {
    size += GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_channel_data);
  }

  return size;
}

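/* These helpers locate the element array that sits immediately after the
   alignment-padded stack header, per the layout described above. */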
#define CHANNEL_ELEMS_FROM_STACK(stk)                                     \
  ((grpc_channel_element*)((char*)(stk) + GPR_ROUND_UP_TO_ALIGNMENT_SIZE( \
                                              sizeof(grpc_channel_stack))))

#define CALL_ELEMS_FROM_STACK(stk)                                     \
  ((grpc_call_element*)((char*)(stk) + GPR_ROUND_UP_TO_ALIGNMENT_SIZE( \
                                           sizeof(grpc_call_stack))))

grpc_channel_element* grpc_channel_stack_element(
    grpc_channel_stack* channel_stack, size_t index) {
  return CHANNEL_ELEMS_FROM_STACK(channel_stack) + index;
}

grpc_channel_element* grpc_channel_stack_last_element(
    grpc_channel_stack* channel_stack) {
  return grpc_channel_stack_element(channel_stack, channel_stack->count - 1);
}

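/* Counts how many elements before `elem` use the same filter. For example
   (hypothetical stack), with elements running filters [A, B, A, C] and `elem`
   pointing at the second A, this returns 1. */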
size_t grpc_channel_stack_filter_instance_number(
    grpc_channel_stack* channel_stack, grpc_channel_element* elem) {
  size_t num_found = 0;
  for (size_t i = 0; i < channel_stack->count; ++i) {
    grpc_channel_element* element =
        grpc_channel_stack_element(channel_stack, i);
    if (element == elem) break;
    if (element->filter == elem->filter) ++num_found;
  }
  return num_found;
}

grpc_call_element* grpc_call_stack_element(grpc_call_stack* call_stack,
                                           size_t index) {
  return CALL_ELEMS_FROM_STACK(call_stack) + index;
}

grpc_error_handle grpc_channel_stack_init(
    int initial_refs, grpc_iomgr_cb_func destroy, void* destroy_arg,
    const grpc_channel_filter** filters, size_t filter_count,
    const grpc_channel_args* channel_args, grpc_transport* optional_transport,
    const char* name, grpc_channel_stack* stack) {
  size_t call_size =
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_call_stack)) +
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filter_count * sizeof(grpc_call_element));
  grpc_channel_element* elems;
  grpc_channel_element_args args;
  char* user_data;
  size_t i;

  stack->count = filter_count;
  GRPC_STREAM_REF_INIT(&stack->refcount, initial_refs, destroy, destroy_arg,
                       name);
  elems = CHANNEL_ELEMS_FROM_STACK(stack);
  user_data = (reinterpret_cast<char*>(elems)) +
              GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filter_count *
                                             sizeof(grpc_channel_element));

  /* init per-filter data */
  grpc_error_handle first_error = GRPC_ERROR_NONE;
  for (i = 0; i < filter_count; i++) {
    args.channel_stack = stack;
    args.channel_args = channel_args;
    args.optional_transport = optional_transport;
    args.is_first = i == 0;
    args.is_last = i == (filter_count - 1);
    elems[i].filter = filters[i];
    elems[i].channel_data = user_data;
    grpc_error_handle error =
        elems[i].filter->init_channel_elem(&elems[i], &args);
    if (error != GRPC_ERROR_NONE) {
      if (first_error == GRPC_ERROR_NONE) {
        first_error = error;
      } else {
        GRPC_ERROR_UNREF(error);
      }
    }
    user_data +=
        GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_channel_data);
    call_size += GPR_ROUND_UP_TO_ALIGNMENT_SIZE(filters[i]->sizeof_call_data);
  }

  GPR_ASSERT(user_data > (char*)stack);
  GPR_ASSERT((uintptr_t)(user_data - (char*)stack) ==
             grpc_channel_stack_size(filters, filter_count));

  stack->call_stack_size = call_size;
  return first_error;
}
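
// Usage sketch (hypothetical caller; real callers typically embed the stack
// in a larger allocation): pair grpc_channel_stack_size() with the init and
// destroy functions above and below. If more than one filter fails to
// initialize, only the first error is returned and the rest are unref'd.
//
//   size_t size = grpc_channel_stack_size(filters, filter_count);
//   grpc_channel_stack* stk =
//       static_cast<grpc_channel_stack*>(gpr_malloc(size));
//   grpc_error_handle err = grpc_channel_stack_init(
//       1, destroy_cb, stk /* destroy_arg */, filters, filter_count,
//       channel_args, nullptr /* optional_transport */, "CHANNEL", stk);
//   ... use the channel ...
//   grpc_channel_stack_destroy(stk);
//   gpr_free(stk);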
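/* Runs each filter's destroy_channel_elem; the memory backing the stack is
   not freed here and remains the caller's responsibility. */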
void grpc_channel_stack_destroy(grpc_channel_stack* stack) {
  grpc_channel_element* channel_elems = CHANNEL_ELEMS_FROM_STACK(stack);
  size_t count = stack->count;
  size_t i;

  /* destroy per-filter data */
  for (i = 0; i < count; i++) {
    channel_elems[i].filter->destroy_channel_elem(&channel_elems[i]);
  }
}

grpc_error_handle grpc_call_stack_init(
    grpc_channel_stack* channel_stack, int initial_refs,
    grpc_iomgr_cb_func destroy, void* destroy_arg,
    const grpc_call_element_args* elem_args) {
  grpc_channel_element* channel_elems = CHANNEL_ELEMS_FROM_STACK(channel_stack);
  size_t count = channel_stack->count;
  grpc_call_element* call_elems;
  char* user_data;

  elem_args->call_stack->count = count;
  GRPC_STREAM_REF_INIT(&elem_args->call_stack->refcount, initial_refs, destroy,
                       destroy_arg, "CALL_STACK");
  call_elems = CALL_ELEMS_FROM_STACK(elem_args->call_stack);
  user_data = (reinterpret_cast<char*>(call_elems)) +
              GPR_ROUND_UP_TO_ALIGNMENT_SIZE(count * sizeof(grpc_call_element));

  /* init per-filter data */
  grpc_error_handle first_error = GRPC_ERROR_NONE;
  for (size_t i = 0; i < count; i++) {
    call_elems[i].filter = channel_elems[i].filter;
    call_elems[i].channel_data = channel_elems[i].channel_data;
    call_elems[i].call_data = user_data;
    user_data +=
        GPR_ROUND_UP_TO_ALIGNMENT_SIZE(call_elems[i].filter->sizeof_call_data);
  }
  for (size_t i = 0; i < count; i++) {
    grpc_error_handle error =
        call_elems[i].filter->init_call_elem(&call_elems[i], elem_args);
    if (error != GRPC_ERROR_NONE) {
      if (first_error == GRPC_ERROR_NONE) {
        first_error = error;
      } else {
        GRPC_ERROR_UNREF(error);
      }
    }
  }
  return first_error;
}
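
// Usage sketch (hypothetical caller): the owning channel stack records the
// bytes needed for one call stack in channel_stack->call_stack_size, so a
// caller allocates that much space, points elem_args->call_stack at it, and
// then runs this init. As above, only the first init error is kept.
//
//   grpc_call_stack* call_stack = static_cast<grpc_call_stack*>(
//       gpr_malloc(channel_stack->call_stack_size));
//   ... fill in a grpc_call_element_args whose call_stack field points at
//       call_stack, plus the per-call info the filters need ...
//   grpc_error_handle err = grpc_call_stack_init(
//       channel_stack, 1, destroy_cb, call_stack /* destroy_arg */,
//       &elem_args);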

void grpc_call_stack_set_pollset_or_pollset_set(grpc_call_stack* call_stack,
                                                grpc_polling_entity* pollent) {
  size_t count = call_stack->count;
  grpc_call_element* call_elems;
  size_t i;

  call_elems = CALL_ELEMS_FROM_STACK(call_stack);

  /* set the pollset or pollset_set on each per-filter element */
  for (i = 0; i < count; i++) {
    call_elems[i].filter->set_pollset_or_pollset_set(&call_elems[i], pollent);
  }
}

void grpc_call_stack_ignore_set_pollset_or_pollset_set(
    grpc_call_element* /*elem*/, grpc_polling_entity* /*pollent*/) {}

void grpc_call_stack_destroy(grpc_call_stack* stack,
                             const grpc_call_final_info* final_info,
                             grpc_closure* then_schedule_closure) {
  grpc_call_element* elems = CALL_ELEMS_FROM_STACK(stack);
  size_t count = stack->count;
  size_t i;

  /* destroy per-filter data */
  for (i = 0; i < count; i++) {
    elems[i].filter->destroy_call_elem(
        &elems[i], final_info,
        i == count - 1 ? then_schedule_closure : nullptr);
  }
}

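/* A filter's start_transport_stream_op_batch typically does its own work on
   `op` and then calls grpc_call_next_op() to hand the batch to the element
   below it in the stack. */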
void grpc_call_next_op(grpc_call_element* elem,
                       grpc_transport_stream_op_batch* op) {
  grpc_call_element* next_elem = elem + 1;
  GRPC_CALL_LOG_OP(GPR_INFO, next_elem, op);
  next_elem->filter->start_transport_stream_op_batch(next_elem, op);
}

void grpc_channel_next_get_info(grpc_channel_element* elem,
                                const grpc_channel_info* channel_info) {
  grpc_channel_element* next_elem = elem + 1;
  next_elem->filter->get_channel_info(next_elem, channel_info);
}

void grpc_channel_next_op(grpc_channel_element* elem, grpc_transport_op* op) {
  grpc_channel_element* next_elem = elem + 1;
  next_elem->filter->start_transport_op(next_elem, op);
}

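/* Recover the owning stack from a pointer to its first ("top") element by
   undoing the header offset applied by CHANNEL_ELEMS_FROM_STACK /
   CALL_ELEMS_FROM_STACK. Only valid for the element at index 0. */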
grpc_channel_stack* grpc_channel_stack_from_top_element(
    grpc_channel_element* elem) {
  return reinterpret_cast<grpc_channel_stack*>(
      reinterpret_cast<char*>(elem) -
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_channel_stack)));
}

grpc_call_stack* grpc_call_stack_from_top_element(grpc_call_element* elem) {
  return reinterpret_cast<grpc_call_stack*>(
      reinterpret_cast<char*>(elem) -
      GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(grpc_call_stack)));
}