OmniSciDB  a5dc49c757
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
RuntimeFunctions.cpp File Reference
#include "RuntimeFunctions.h"
#include "BufferCompaction.h"
#include "DecisionTreeEntry.h"
#include "HyperLogLogRank.h"
#include "MurmurHash.h"
#include "Shared/Datum.h"
#include "Shared/quantile.h"
#include "TypePunning.h"
#include "Utils/SegmentTreeUtils.h"
#include <atomic>
#include <cfloat>
#include <chrono>
#include <cmath>
#include <cstring>
#include <functional>
#include <thread>
#include <tuple>
#include "DecodersImpl.h"
#include "GeoOpsRuntime.cpp"
#include "GroupByRuntime.cpp"
#include "JoinHashTable/Runtime/JoinHashTableQueryRuntime.cpp"
#include "TopKRuntime.cpp"
+ Include dependency graph for RuntimeFunctions.cpp:

Go to the source code of this file.

Namespaces

 anonymous_namespace{RuntimeFunctions.cpp}
 

Macros

#define DEF_ARITH_NULLABLE(type, null_type, opname, opsym)
 
#define DEF_ARITH_NULLABLE_LHS(type, null_type, opname, opsym)
 
#define DEF_ARITH_NULLABLE_RHS(type, null_type, opname, opsym)
 
#define DEF_CMP_NULLABLE(type, null_type, opname, opsym)
 
#define DEF_CMP_NULLABLE_LHS(type, null_type, opname, opsym)
 
#define DEF_CMP_NULLABLE_RHS(type, null_type, opname, opsym)
 
#define DEF_SAFE_DIV_NULLABLE(type, null_type, opname)
 
#define DEF_BINARY_NULLABLE_ALL_OPS(type, null_type)
 
#define DEF_MAP_STRING_TO_DATUM(value_type, value_name)
 
#define DEF_UMINUS_NULLABLE(type, null_type)
 
#define DEF_CAST_NULLABLE(from_type, to_type)
 
#define DEF_CAST_SCALED_NULLABLE(from_type, to_type)
 
#define DEF_CAST_NULLABLE_BIDIR(type1, type2)
 
#define DEF_ROUND_NULLABLE(from_type, to_type)
 
#define GPU_RT_STUB   NEVER_INLINE __attribute__((optnone))
 
#define DEF_COMPUTE_CURRENT_ROW_IDX_IN_FRAME(value_type, oper_name)
 
#define DEF_COMPUTE_CURRENT_ROW_IDX_IN_FRAME_ALL_TYPES(oper_name)
 
#define DEF_RANGE_MODE_FRAME_LOWER_BOUND(target_val_type, col_type, null_type, opname, opsym)
 
#define DEF_RANGE_MODE_FRAME_UPPER_BOUND(target_val_type, col_type, null_type, opname, opsym)
 
#define DEF_GET_VALUE_IN_FRAME(col_type, logical_type)
 
#define DEF_SEARCH_AGGREGATION_TREE(agg_value_type)
 
#define DEF_SEARCH_DERIVED_AGGREGATION_TREE(agg_value_type)
 
#define DEF_HANDLE_NULL_FOR_WINDOW_FRAMING_AGG(agg_type, null_type)
 
#define DEF_FILL_MISSING_VALUE(col_type)
 
#define DEF_AGG_MAX_INT(n)
 
#define DEF_AGG_MIN_INT(n)
 
#define DEF_AGG_ID_INT(n)
 
#define DEF_CHECKED_SINGLE_AGG_ID_INT(n)
 
#define DEF_WRITE_PROJECTION_INT(n)
 
#define DEF_SKIP_AGG_ADD(base_agg_func)
 
#define DEF_SKIP_AGG(base_agg_func)
 
#define DATA_T   int64_t
 
#define DATA_T   int32_t
 
#define DATA_T   int16_t
 
#define DATA_T   int8_t
 
#define DEF_SKIP_AGG(base_agg_func)
 
#define DEF_SKIP_IF_AGG(skip_agg_func, base_agg_func)
 
#define DATA_T   double
 
#define ADDR_T   int64_t
 
#define DATA_T   float
 
#define ADDR_T   int32_t
 
#define DEF_SHARED_AGG_RET_STUBS(base_agg_func)
 
#define DEF_SHARED_AGG_STUBS(base_agg_func)
 

Enumerations

enum  anonymous_namespace{RuntimeFunctions.cpp}::AggFuncType { anonymous_namespace{RuntimeFunctions.cpp}::AggFuncType::MIN, anonymous_namespace{RuntimeFunctions.cpp}::AggFuncType::MAX, anonymous_namespace{RuntimeFunctions.cpp}::AggFuncType::SUM }
 

Functions

RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
scale_decimal_up (const int64_t operand, const uint64_t scale, const int64_t operand_null_val, const int64_t result_null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
scale_decimal_down_nullable (const int64_t operand, const int64_t scale, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
scale_decimal_down_not_nullable (const int64_t operand, const int64_t scale, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
floor_div_lhs (const int64_t dividend, const int64_t divisor)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
floor_div_nullable_lhs (const int64_t dividend, const int64_t divisor, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_not (const int8_t operand, const int8_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_and (const int8_t lhs, const int8_t rhs, const int8_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_or (const int8_t lhs, const int8_t rhs, const int8_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
uint64_t 
agg_count (uint64_t *agg, const int64_t)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap (int64_t *agg, const int64_t val, const int64_t min_val, const int64_t bucket_size)
 
GPU_RT_STUB void agg_count_distinct_bitmap_gpu (int64_t *, const int64_t, const int64_t, const int64_t, const int64_t, const int64_t, const uint64_t, const uint64_t)
 
RUNTIME_EXPORT NEVER_INLINE void agg_approximate_count_distinct (int64_t *agg, const int64_t key, const uint32_t b)
 
GPU_RT_STUB void agg_approximate_count_distinct_gpu (int64_t *, const int64_t, const uint32_t, const int64_t, const int64_t)
 
RUNTIME_EXPORT ALWAYS_INLINE int8_t bit_is_set (const int8_t *bitset, const int64_t val, const int64_t min_val, const int64_t max_val, const int64_t null_val, const int8_t null_bool_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
compute_int64_t_lower_bound (const int64_t entry_cnt, const int64_t target_value, const int64_t *col_buf)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
get_valid_buf_start_pos (const int64_t null_start_pos, const int64_t null_end_pos)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
get_valid_buf_end_pos (const int64_t num_elems, const int64_t null_start_pos, const int64_t null_end_pos)
 
template<typename T , typename Comparator >
int64_t compute_current_row_idx_in_frame (const int64_t num_elems, const int64_t cur_row_idx, const T *col_buf, const int32_t *partition_rowid_buf, const int64_t *ordered_index_buf, const T null_val, const bool nulls_first, const int64_t null_start_pos, const int64_t null_end_pos, Comparator cmp)
 
template<typename TARGET_VAL_TYPE , typename COL_TYPE , typename NULL_TYPE >
int64_t compute_lower_bound_from_ordered_partition_index (const int64_t num_elems, const TARGET_VAL_TYPE target_val, const COL_TYPE *col_buf, const int32_t *partition_rowid_buf, const int64_t *ordered_index_buf, const NULL_TYPE null_val, const bool nulls_first, const int64_t null_start_offset, const int64_t null_end_offset)
 
template<typename TARGET_VAL_TYPE , typename COL_TYPE , typename NULL_TYPE >
int64_t compute_upper_bound_from_ordered_partition_index (const int64_t num_elems, const TARGET_VAL_TYPE target_val, const COL_TYPE *col_buf, const int32_t *partition_rowid_buf, const int64_t *ordered_index_buf, const NULL_TYPE null_val, const bool nulls_first, const int64_t null_start_offset, const int64_t null_end_offset)
 
template<typename COL_TYPE , typename LOGICAL_TYPE >
LOGICAL_TYPE get_value_in_window_frame (const int64_t target_row_idx_in_frame, const int64_t frame_start_offset, const int64_t frame_end_offset, const COL_TYPE *col_buf, const int32_t *partition_rowid_buf, const int64_t *ordered_index_buf, const LOGICAL_TYPE logical_null_val, const LOGICAL_TYPE col_null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
encode_date (int64_t decoded_val, int64_t null_val, int64_t multiplier)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
compute_row_mode_start_index_sub (int64_t candidate_index, int64_t current_partition_start_offset, int64_t frame_bound)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
compute_row_mode_start_index_add (int64_t candidate_index, int64_t current_partition_start_offset, int64_t frame_bound, int64_t num_current_partition_elem)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
compute_row_mode_end_index_sub (int64_t candidate_index, int64_t current_partition_start_offset, int64_t frame_bound)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
compute_row_mode_end_index_add (int64_t candidate_index, int64_t current_partition_start_offset, int64_t frame_bound, int64_t num_current_partition_elem)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t * 
get_integer_aggregation_tree (int64_t **aggregation_trees, size_t partition_idx)
 
RUNTIME_EXPORT ALWAYS_INLINE
double * 
get_double_aggregation_tree (int64_t **aggregation_trees, size_t partition_idx)
 
RUNTIME_EXPORT ALWAYS_INLINE
SumAndCountPair< int64_t > * 
get_integer_derived_aggregation_tree (int64_t **aggregation_trees, size_t partition_idx)
 
RUNTIME_EXPORT ALWAYS_INLINE
SumAndCountPair< double > * 
get_double_derived_aggregation_tree (int64_t **aggregation_trees, size_t partition_idx)
 
RUNTIME_EXPORT ALWAYS_INLINE size_t getStartOffsetForSegmentTreeTraversal (size_t level, size_t tree_fanout)
 
template<AggFuncType AGG_FUNC_TYPE, typename AGG_TYPE >
AGG_TYPE anonymous_namespace{RuntimeFunctions.cpp}::agg_func (AGG_TYPE const lhs, AGG_TYPE const rhs)
 
template<AggFuncType AGG_FUNC_TYPE, typename AGG_TYPE >
AGG_TYPE compute_window_func_via_aggregation_tree (AGG_TYPE *aggregation_tree_for_partition, size_t query_range_start_idx, size_t query_range_end_idx, size_t leaf_level, size_t tree_fanout, AGG_TYPE init_val, AGG_TYPE invalid_val, AGG_TYPE null_val)
 
template<typename AGG_VALUE_TYPE >
void compute_derived_aggregates (SumAndCountPair< AGG_VALUE_TYPE > *aggregation_tree_for_partition, SumAndCountPair< AGG_VALUE_TYPE > &res, size_t query_range_start_idx, size_t query_range_end_idx, size_t leaf_level, size_t tree_fanout, AGG_VALUE_TYPE invalid_val, AGG_VALUE_TYPE null_val)
 
template<typename T >
fill_missing_value (int64_t const cur_idx, T const null_val, T *const col_buf, int64_t const num_elems_in_partition, int32_t *const partition_rowid_buf, int64_t *const ordered_index_buf, bool const is_forward_fill)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
agg_sum (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
agg_sum_if (int64_t *agg, const int64_t val, const int8_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_max (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_min (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_id (int64_t *agg, const int64_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE
int8_t * 
agg_id_varlen (int8_t *varlen_buffer, const int64_t offset, const int8_t *value, const int64_t size_bytes)
 
RUNTIME_EXPORT ALWAYS_INLINE
int32_t 
checked_single_agg_id (int64_t *agg, const int64_t val, const int64_t null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap_skip_val (int64_t *agg, const int64_t val, const int64_t min_val, const int64_t bucket_size, const int64_t skip_val)
 
GPU_RT_STUB void agg_count_distinct_bitmap_skip_val_gpu (int64_t *, const int64_t, const int64_t, const int64_t, const int64_t, const int64_t, const int64_t, const uint64_t, const uint64_t)
 
RUNTIME_EXPORT ALWAYS_INLINE
uint32_t 
agg_count_int32 (uint32_t *agg, const int32_t)
 
RUNTIME_EXPORT ALWAYS_INLINE
uint32_t 
agg_count_if_int32 (uint32_t *agg, const int32_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE
int32_t 
agg_sum_int32 (int32_t *agg, const int32_t val)
 
RUNTIME_EXPORT ALWAYS_INLINE
int32_t 
agg_sum_if_int32 (int32_t *agg, const int32_t val, const int8_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
agg_sum_skip_val (int64_t *agg, const int64_t val, const int64_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
int32_t 
agg_sum_int32_skip_val (int32_t *agg, const int32_t val, const int32_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
agg_sum_if_skip_val (int64_t *agg, const int64_t val, const int64_t skip_val, const int8_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE
int32_t 
agg_sum_if_int32_skip_val (int32_t *agg, const int32_t val, const int32_t skip_val, const int8_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE
uint64_t 
agg_count_if (uint64_t *agg, const int64_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE
uint64_t 
agg_count_skip_val (uint64_t *agg, const int64_t val, const int64_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
uint64_t 
agg_count_if_skip_val (uint64_t *agg, const int64_t cond, const int64_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
uint32_t 
agg_count_int32_skip_val (uint32_t *agg, const int32_t val, const int32_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
uint32_t 
agg_count_if_int32_skip_val (uint32_t *agg, const int32_t cond, const int32_t skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
uint64_t 
agg_count_double (uint64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_if_double (int64_t *agg, const double val, const int8_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_max_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_min_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_id_double (int64_t *agg, const double val)
 
RUNTIME_EXPORT ALWAYS_INLINE
int32_t 
checked_single_agg_id_double (int64_t *agg, const double val, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
uint32_t 
agg_count_float (uint32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_if_sum_float (int32_t *agg, const float val, const int8_t cond)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_max_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_min_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE void agg_id_float (int32_t *agg, const float val)
 
RUNTIME_EXPORT ALWAYS_INLINE
int32_t 
checked_single_agg_id_float (int32_t *agg, const float val, const float null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
uint64_t 
agg_count_double_skip_val (uint64_t *agg, const double val, const double skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
uint32_t 
agg_count_float_skip_val (uint32_t *agg, const float val, const float skip_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
decimal_floor (const int64_t x, const int64_t scale)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
decimal_ceil (const int64_t x, const int64_t scale)
 
GPU_RT_STUB int8_t * agg_id_varlen_shared (int8_t *varlen_buffer, const int64_t offset, const int8_t *value, const int64_t size_bytes)
 
GPU_RT_STUB int32_t checked_single_agg_id_shared (int64_t *agg, const int64_t val, const int64_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_int32_shared (int32_t *agg, const int32_t val, const int32_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_int16_shared (int16_t *agg, const int16_t val, const int16_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_int8_shared (int8_t *agg, const int8_t val, const int8_t null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_double_shared (int64_t *agg, const double val, const double null_val)
 
GPU_RT_STUB int32_t checked_single_agg_id_float_shared (int32_t *agg, const float val, const float null_val)
 
GPU_RT_STUB void agg_max_int16_skip_val_shared (int16_t *agg, const int16_t val, const int16_t skip_val)
 
GPU_RT_STUB void agg_max_int8_skip_val_shared (int8_t *agg, const int8_t val, const int8_t skip_val)
 
GPU_RT_STUB void agg_min_int16_skip_val_shared (int16_t *agg, const int16_t val, const int16_t skip_val)
 
GPU_RT_STUB void agg_min_int8_skip_val_shared (int8_t *agg, const int8_t val, const int8_t skip_val)
 
GPU_RT_STUB void agg_id_double_shared_slow (int64_t *agg, const double *val)
 
GPU_RT_STUB int64_t agg_sum_shared (int64_t *agg, const int64_t val)
 
GPU_RT_STUB int64_t agg_sum_if_shared (int64_t *agg, const int64_t val, const int8_t cond)
 
GPU_RT_STUB int64_t agg_sum_skip_val_shared (int64_t *agg, const int64_t val, const int64_t skip_val)
 
GPU_RT_STUB int64_t agg_sum_if_skip_val_shared (int64_t *agg, const int64_t val, const int64_t skip_val, const int8_t cond)
 
GPU_RT_STUB int32_t agg_sum_int32_shared (int32_t *agg, const int32_t val)
 
GPU_RT_STUB int32_t agg_sum_int32_skip_val_shared (int32_t *agg, const int32_t val, const int32_t skip_val)
 
GPU_RT_STUB void agg_sum_double_shared (int64_t *agg, const double val)
 
GPU_RT_STUB void agg_sum_double_skip_val_shared (int64_t *agg, const double val, const double skip_val)
 
GPU_RT_STUB void agg_sum_float_shared (int32_t *agg, const float val)
 
GPU_RT_STUB void agg_sum_float_skip_val_shared (int32_t *agg, const float val, const float skip_val)
 
GPU_RT_STUB int32_t agg_sum_if_int32_shared (int32_t *agg, const int32_t val, const int8_t cond)
 
GPU_RT_STUB int32_t agg_sum_if_int32_skip_val_shared (int32_t *agg, const int32_t val, const int32_t skip_val, const int8_t cond)
 
GPU_RT_STUB void agg_sum_if_double_shared (int64_t *agg, const double val, const int8_t cond)
 
GPU_RT_STUB void agg_sum_if_double_skip_val_shared (int64_t *agg, const double val, const double skip_val, const int8_t cond)
 
GPU_RT_STUB void agg_sum_if_float_shared (int32_t *agg, const float val, const int8_t cond)
 
GPU_RT_STUB void agg_sum_if_float_skip_val_shared (int32_t *agg, const float val, const float skip_val, const int8_t cond)
 
GPU_RT_STUB void force_sync ()
 
GPU_RT_STUB void sync_warp ()
 
GPU_RT_STUB void sync_warp_protected (int64_t thread_pos, int64_t row_count)
 
GPU_RT_STUB void sync_threadblock ()
 
GPU_RT_STUB void write_back_non_grouped_agg (int64_t *input_buffer, int64_t *output_buffer, const int32_t num_agg_cols)
 
RUNTIME_EXPORT NEVER_INLINE int32_t pos_start_impl (int32_t const *row_index_resume)
 
RUNTIME_EXPORT NEVER_INLINE int32_t group_buff_idx_impl ()
 
RUNTIME_EXPORT NEVER_INLINE int32_t pos_step_impl ()
 
GPU_RT_STUB int8_t thread_warp_idx (const int8_t warp_sz)
 
GPU_RT_STUB int64_t get_thread_index ()
 
GPU_RT_STUB int64_t * declare_dynamic_shared_memory ()
 
GPU_RT_STUB int64_t get_block_index ()
 
RUNTIME_EXPORT ALWAYS_INLINE void record_error_code (const int32_t err_code, int32_t *error_codes)
 
RUNTIME_EXPORT ALWAYS_INLINE
int32_t 
get_error_code (int32_t *error_codes)
 
RUNTIME_EXPORT NEVER_INLINE
const int64_t * 
init_shared_mem_nop (const int64_t *groups_buffer, const int32_t groups_buffer_size)
 
RUNTIME_EXPORT NEVER_INLINE void write_back_nop (int64_t *dest, int64_t *src, const int32_t sz)
 
RUNTIME_EXPORT int64_t * init_shared_mem (const int64_t *global_groups_buffer, const int32_t groups_buffer_size)
 
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_gpu (int64_t *groups_buffer, const int64_t *init_vals, const uint32_t groups_buffer_entry_count, const uint32_t key_qw_count, const uint32_t agg_col_count, const bool keyless, const int8_t warp_size)
 
RUNTIME_EXPORT NEVER_INLINE void init_columnar_group_by_buffer_gpu (int64_t *groups_buffer, const int64_t *init_vals, const uint32_t groups_buffer_entry_count, const uint32_t key_qw_count, const uint32_t agg_col_count, const bool keyless, const bool blocks_share_memory, const int32_t frag_idx)
 
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_impl (int64_t *groups_buffer, const int64_t *init_vals, const uint32_t groups_buffer_entry_count, const uint32_t key_qw_count, const uint32_t agg_col_count, const bool keyless, const int8_t warp_size)
 
template<typename T >
ALWAYS_INLINE int64_t * get_matching_group_value (int64_t *groups_buffer, const uint32_t h, const T *key, const uint32_t key_count, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t * 
get_matching_group_value (int64_t *groups_buffer, const uint32_t h, const int64_t *key, const uint32_t key_count, const uint32_t key_width, const uint32_t row_size_quad)
 
template<typename T >
ALWAYS_INLINE int32_t get_matching_group_value_columnar_slot (int64_t *groups_buffer, const uint32_t entry_count, const uint32_t h, const T *key, const uint32_t key_count)
 
RUNTIME_EXPORT ALWAYS_INLINE
int32_t 
get_matching_group_value_columnar_slot (int64_t *groups_buffer, const uint32_t entry_count, const uint32_t h, const int64_t *key, const uint32_t key_count, const uint32_t key_width)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t * 
get_matching_group_value_columnar (int64_t *groups_buffer, const uint32_t h, const int64_t *key, const uint32_t key_qw_count, const size_t entry_count)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t * 
get_matching_group_value_perfect_hash (int64_t *groups_buffer, const uint32_t hashed_index, const int64_t *key, const uint32_t key_count, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t * 
get_matching_group_value_perfect_hash_keyless (int64_t *groups_buffer, const uint32_t hashed_index, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE void set_matching_group_value_perfect_hash_columnar (int64_t *groups_buffer, const uint32_t hashed_index, const int64_t *key, const uint32_t key_count, const uint32_t entry_count)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t * 
get_group_value_fast_keyless (int64_t *groups_buffer, const int64_t key, const int64_t min_key, const int64_t, const uint32_t row_size_quad)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t * 
get_group_value_fast_keyless_semiprivate (int64_t *groups_buffer, const int64_t key, const int64_t min_key, const int64_t, const uint32_t row_size_quad, const uint8_t thread_warp_idx, const uint8_t warp_size)
 
RUNTIME_EXPORT ALWAYS_INLINE
StringView 
string_pack (const int8_t *ptr, const int32_t len)
 
RUNTIME_EXPORT ALWAYS_INLINE
DEVICE int32_t 
char_length (const char *str, const int32_t str_len)
 
RUNTIME_EXPORT ALWAYS_INLINE
DEVICE int32_t 
char_length_nullable (const char *str, const int32_t str_len, const int32_t int_null)
 
RUNTIME_EXPORT ALWAYS_INLINE
DEVICE int32_t 
key_for_string_encoded (const int32_t str_id)
 
ALWAYS_INLINE DEVICE int32_t map_string_dict_id (const int32_t string_id, const int64_t translation_map_handle, const int32_t min_source_id)
 
ALWAYS_INLINE DEVICE double tree_model_reg_predict (const double *regressor_inputs, const int64_t decision_tree_table_handle, const int64_t decision_tree_offsets_handle, const int32_t num_regressors, const int32_t num_trees, const bool compute_avg, const double null_value)
 
RUNTIME_EXPORT ALWAYS_INLINE
DEVICE bool 
sample_ratio (const double proportion, const int64_t row_offset)
 
RUNTIME_EXPORT ALWAYS_INLINE
DEVICE int32_t 
width_bucket (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE
DEVICE int32_t 
width_bucket_reversed (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE
int32_t 
width_bucket_nullable (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
int32_t 
width_bucket_reversed_nullable (const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
DEVICE int32_t 
width_bucket_no_oob_check (const double target_value, const double lower_bound, const double scale_factor)
 
RUNTIME_EXPORT ALWAYS_INLINE
DEVICE int32_t 
width_bucket_reversed_no_oob_check (const double target_value, const double lower_bound, const double scale_factor)
 
RUNTIME_EXPORT ALWAYS_INLINE
DEVICE int32_t 
width_bucket_expr (const double target_value, const bool reversed, const double lower_bound, const double upper_bound, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE
DEVICE int32_t 
width_bucket_expr_nullable (const double target_value, const bool reversed, const double lower_bound, const double upper_bound, const int32_t partition_count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE
DEVICE int32_t 
width_bucket_expr_no_oob_check (const double target_value, const bool reversed, const double lower_bound, const double upper_bound, const int32_t partition_count)
 
RUNTIME_EXPORT ALWAYS_INLINE
int64_t 
row_number_window_func (const int64_t output_buff, const int64_t pos)
 
RUNTIME_EXPORT ALWAYS_INLINE double percent_window_func (const int64_t output_buff, const int64_t pos)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_double (const int64_t *agg)
 
RUNTIME_EXPORT ALWAYS_INLINE float load_float (const int32_t *agg)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_int (const int64_t *sum, const int64_t *count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_decimal (const int64_t *sum, const int64_t *count, const double null_val, const uint32_t scale)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_double (const int64_t *agg, const int64_t *count, const double null_val)
 
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_float (const int32_t *agg, const int32_t *count, const double null_val)
 
RUNTIME_EXPORT NEVER_INLINE void linear_probabilistic_count (uint8_t *bitmap, const uint32_t bitmap_bytes, const uint8_t *key_bytes, const uint32_t key_len)
 
RUNTIME_EXPORT NEVER_INLINE void query_stub_hoisted_literals (int32_t *error_codes, int32_t *total_matched, int64_t **out, const uint32_t frag_idx, const uint32_t *row_index_resume, const int8_t **col_buffers, const int8_t *literals, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, const int64_t *init_agg_value, const int64_t *join_hash_tables, const int8_t *row_func_mgr)
 
RUNTIME_EXPORT void multifrag_query_hoisted_literals (int32_t *error_codes, int32_t *total_matched, int64_t **out, const uint32_t *num_fragments_ptr, const uint32_t *num_tables_ptr, const uint32_t *row_index_resume, const int8_t ***col_buffers, const int8_t *literals, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, const int64_t *init_agg_value, const int64_t *join_hash_tables, const int8_t *row_func_mgr)
 
RUNTIME_EXPORT NEVER_INLINE void query_stub (int32_t *error_codes, int32_t *total_matched, int64_t **out, const uint32_t frag_idx, const uint32_t *row_index_resume, const int8_t **col_buffers, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, const int64_t *init_agg_value, const int64_t *join_hash_tables, const int8_t *row_func_mgr)
 
RUNTIME_EXPORT void multifrag_query (int32_t *error_codes, int32_t *total_matched, int64_t **out, const uint32_t *num_fragments_ptr, const uint32_t *num_tables_ptr, const uint32_t *row_index_resume, const int8_t ***col_buffers, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, const int64_t *init_agg_value, const int64_t *join_hash_tables, const int8_t *row_func_mgr)
 
RUNTIME_EXPORT ALWAYS_INLINE
DEVICE bool 
point_int32_is_null (int32_t *point)
 
RUNTIME_EXPORT ALWAYS_INLINE
DEVICE bool 
point_double_is_null (double *point)
 
RUNTIME_EXPORT ALWAYS_INLINE
DEVICE bool 
check_interrupt ()
 
RUNTIME_EXPORT bool check_interrupt_init (unsigned command)
 

Macro Definition Documentation

#define ADDR_T   int64_t

Definition at line 1569 of file RuntimeFunctions.cpp.

#define ADDR_T   int32_t

Definition at line 1569 of file RuntimeFunctions.cpp.

#define DATA_T   int64_t

Definition at line 1568 of file RuntimeFunctions.cpp.

#define DATA_T   int32_t

Definition at line 1568 of file RuntimeFunctions.cpp.

#define DATA_T   int16_t

Definition at line 1568 of file RuntimeFunctions.cpp.

#define DATA_T   int8_t

Definition at line 1568 of file RuntimeFunctions.cpp.

#define DATA_T   double

Definition at line 1568 of file RuntimeFunctions.cpp.

#define DATA_T   float

Definition at line 1568 of file RuntimeFunctions.cpp.

#define DEF_AGG_ID_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void agg_id_int##n(int##n##_t* agg, \
const int##n##_t val) { \
*agg = val; \
}
#define RUNTIME_EXPORT
constexpr double n
Definition: Utm.h:38
#define ALWAYS_INLINE

Definition at line 1245 of file RuntimeFunctions.cpp.

#define DEF_AGG_MAX_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void agg_max_int##n(int##n##_t* agg, \
const int##n##_t val) { \
*agg = std::max(*agg, val); \
}
#define RUNTIME_EXPORT
constexpr double n
Definition: Utm.h:38
#define ALWAYS_INLINE

Definition at line 1223 of file RuntimeFunctions.cpp.

#define DEF_AGG_MIN_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void agg_min_int##n(int##n##_t* agg, \
const int##n##_t val) { \
*agg = std::min(*agg, val); \
}
#define RUNTIME_EXPORT
constexpr double n
Definition: Utm.h:38
#define ALWAYS_INLINE

Definition at line 1234 of file RuntimeFunctions.cpp.

#define DEF_ARITH_NULLABLE (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type opname##_##type##_nullable( \
const type lhs, const type rhs, const null_type null_val) { \
if (lhs != null_val && rhs != null_val) { \
return lhs opsym rhs; \
} \
return null_val; \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 46 of file RuntimeFunctions.cpp.

#define DEF_ARITH_NULLABLE_LHS (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type opname##_##type##_nullable_lhs( \
const type lhs, const type rhs, const null_type null_val) { \
if (lhs != null_val) { \
return lhs opsym rhs; \
} \
return null_val; \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 55 of file RuntimeFunctions.cpp.

#define DEF_ARITH_NULLABLE_RHS (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type opname##_##type##_nullable_rhs( \
const type lhs, const type rhs, const null_type null_val) { \
if (rhs != null_val) { \
return lhs opsym rhs; \
} \
return null_val; \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 64 of file RuntimeFunctions.cpp.

#define DEF_BINARY_NULLABLE_ALL_OPS (   type,
  null_type 
)

Definition at line 118 of file RuntimeFunctions.cpp.

#define DEF_CAST_NULLABLE (   from_type,
  to_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE to_type \
cast_##from_type##_to_##to_type##_nullable(const from_type operand, \
const from_type from_null_val, \
const to_type to_null_val) { \
return operand == from_null_val ? to_null_val : operand; \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 262 of file RuntimeFunctions.cpp.

#define DEF_CAST_NULLABLE_BIDIR (   type1,
  type2 
)
Value:
DEF_CAST_NULLABLE(type1, type2) \
DEF_CAST_NULLABLE(type2, type1)
#define DEF_CAST_NULLABLE(from_type, to_type)

Definition at line 279 of file RuntimeFunctions.cpp.

#define DEF_CAST_SCALED_NULLABLE (   from_type,
  to_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE to_type \
cast_##from_type##_to_##to_type##_scaled_nullable(const from_type operand, \
const from_type from_null_val, \
const to_type to_null_val, \
const to_type divider) { \
return operand == from_null_val ? to_null_val : operand / divider; \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 270 of file RuntimeFunctions.cpp.

#define DEF_CHECKED_SINGLE_AGG_ID_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_int##n( \
int##n##_t* agg, const int##n##_t val, const int##n##_t null_val) { \
if (val == null_val) { \
return 0; \
} \
if (*agg == val) { \
return 0; \
} else if (*agg == null_val) { \
*agg = val; \
return 0; \
} else { \
/* see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES*/ \
return 15; \
} \
}
#define RUNTIME_EXPORT
constexpr double n
Definition: Utm.h:38
#define ALWAYS_INLINE

Definition at line 1251 of file RuntimeFunctions.cpp.

#define DEF_CMP_NULLABLE (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int8_t opname##_##type##_nullable( \
const type lhs, \
const type rhs, \
const null_type null_val, \
const int8_t null_bool_val) { \
if (lhs != null_val && rhs != null_val) { \
return lhs opsym rhs; \
} \
return null_bool_val; \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 73 of file RuntimeFunctions.cpp.

#define DEF_CMP_NULLABLE_LHS (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int8_t opname##_##type##_nullable_lhs( \
const type lhs, \
const type rhs, \
const null_type null_val, \
const int8_t null_bool_val) { \
if (lhs != null_val) { \
return lhs opsym rhs; \
} \
return null_bool_val; \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 85 of file RuntimeFunctions.cpp.

#define DEF_CMP_NULLABLE_RHS (   type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int8_t opname##_##type##_nullable_rhs( \
const type lhs, \
const type rhs, \
const null_type null_val, \
const int8_t null_bool_val) { \
if (rhs != null_val) { \
return lhs opsym rhs; \
} \
return null_bool_val; \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 97 of file RuntimeFunctions.cpp.

#define DEF_COMPUTE_CURRENT_ROW_IDX_IN_FRAME (   value_type,
  oper_name 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t \
compute_##value_type##_##oper_name##_current_row_idx_in_frame( \
const int64_t num_elems, \
const int64_t cur_row_idx, \
const value_type* col_buf, \
const int32_t* partition_rowid_buf, \
const int64_t* ordered_index_buf, \
const value_type null_val, \
const bool nulls_first, \
const int64_t null_start_pos, \
const int64_t null_end_pos) { \
return compute_current_row_idx_in_frame<value_type>(num_elems, \
cur_row_idx, \
col_buf, \
partition_rowid_buf, \
ordered_index_buf, \
null_val, \
nulls_first, \
null_start_pos, \
null_end_pos, \
std::oper_name<value_type>{}); \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 501 of file RuntimeFunctions.cpp.

#define DEF_COMPUTE_CURRENT_ROW_IDX_IN_FRAME_ALL_TYPES (   oper_name)
Value:
#define DEF_COMPUTE_CURRENT_ROW_IDX_IN_FRAME(value_type, oper_name)

Definition at line 524 of file RuntimeFunctions.cpp.

#define DEF_FILL_MISSING_VALUE (   col_type)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE col_type fill_##col_type##_missing_value( \
int64_t const cur_row_idx_in_frame, \
col_type const null_val, \
col_type* const col_buf, \
int64_t const num_elems_in_partition, \
int32_t* const partition_rowid_buf, \
int64_t* const ordered_index_buf, \
bool const is_forward_fill) { \
return fill_missing_value<col_type>(cur_row_idx_in_frame, \
null_val, \
col_buf, \
num_elems_in_partition, \
partition_rowid_buf, \
ordered_index_buf, \
is_forward_fill); \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 1103 of file RuntimeFunctions.cpp.

#define DEF_GET_VALUE_IN_FRAME (   col_type,
  logical_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE logical_type \
get_##col_type##_value_##logical_type##_type_in_frame( \
const int64_t target_row_idx_in_frame, \
const int64_t frame_start_offset, \
const int64_t frame_end_offset, \
const col_type* col_buf, \
const int32_t* partition_rowid_buf, \
const int64_t* ordered_index_buf, \
const logical_type logical_null_val, \
const logical_type col_null_val) { \
return get_value_in_window_frame<col_type, logical_type>(target_row_idx_in_frame, \
frame_start_offset, \
frame_end_offset, \
col_buf, \
partition_rowid_buf, \
ordered_index_buf, \
logical_null_val, \
col_null_val); \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 722 of file RuntimeFunctions.cpp.

#define DEF_HANDLE_NULL_FOR_WINDOW_FRAMING_AGG (   agg_type,
  null_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE agg_type \
handle_null_val_##agg_type##_##null_type##_window_framing_agg( \
agg_type res, null_type agg_null_val, agg_type input_col_null_val) { \
if (res == agg_null_val) { \
return input_col_null_val; \
} \
return res; \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 1060 of file RuntimeFunctions.cpp.

#define DEF_MAP_STRING_TO_DATUM (   value_type,
  value_name 
)
Value:
extern "C" ALWAYS_INLINE DEVICE value_type map_string_to_datum_##value_name( \
const int32_t string_id, \
const int64_t translation_map_handle, \
const int32_t min_source_id) { \
const Datum* translation_map = \
reinterpret_cast<const Datum*>(translation_map_handle); \
const Datum& out_datum = translation_map[string_id - min_source_id]; \
return out_datum.value_name##val; \
}
#define DEVICE
#define ALWAYS_INLINE
Definition: Datum.h:71

Definition at line 179 of file RuntimeFunctions.cpp.

#define DEF_RANGE_MODE_FRAME_LOWER_BOUND (   target_val_type,
  col_type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t \
range_mode_##target_val_type##_##col_type##_##null_type##_##opname##_frame_lower_bound( \
const int64_t num_elems, \
const target_val_type target_value, \
const col_type* col_buf, \
const int32_t* partition_rowid_buf, \
const int64_t* ordered_index_buf, \
const int64_t frame_bound_val, \
const null_type null_val, \
const bool nulls_first, \
const int64_t null_start_pos, \
const int64_t null_end_pos) { \
if (target_value == null_val) { \
return null_start_pos; \
} \
target_val_type new_val = target_value opsym frame_bound_val; \
return compute_lower_bound_from_ordered_partition_index<target_val_type, \
col_type, \
null_type>( \
num_elems, \
new_val, \
col_buf, \
partition_rowid_buf, \
ordered_index_buf, \
null_val, \
nulls_first, \
null_start_pos, \
null_end_pos); \
}
#define RUNTIME_EXPORT
int64_t compute_lower_bound_from_ordered_partition_index(const int64_t num_elems, const TARGET_VAL_TYPE target_val, const COL_TYPE *col_buf, const int32_t *partition_rowid_buf, const int64_t *ordered_index_buf, const NULL_TYPE null_val, const bool nulls_first, const int64_t null_start_offset, const int64_t null_end_offset)
#define ALWAYS_INLINE

Definition at line 566 of file RuntimeFunctions.cpp.

#define DEF_RANGE_MODE_FRAME_UPPER_BOUND (   target_val_type,
  col_type,
  null_type,
  opname,
  opsym 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE int64_t \
range_mode_##target_val_type##_##col_type##_##null_type##_##opname##_frame_upper_bound( \
const int64_t num_elems, \
const target_val_type target_value, \
const col_type* col_buf, \
const int32_t* partition_rowid_buf, \
const int64_t* ordered_index_buf, \
const int64_t frame_bound_val, \
const null_type null_val, \
const bool nulls_first, \
const int64_t null_start_pos, \
const int64_t null_end_pos) { \
if (target_value == null_val) { \
return null_end_pos; \
} \
target_val_type new_val = target_value opsym frame_bound_val; \
return compute_upper_bound_from_ordered_partition_index<target_val_type, \
col_type, \
null_type>( \
num_elems, \
new_val, \
col_buf, \
partition_rowid_buf, \
ordered_index_buf, \
null_val, \
nulls_first, \
null_start_pos, \
null_end_pos); \
}
int64_t compute_upper_bound_from_ordered_partition_index(const int64_t num_elems, const TARGET_VAL_TYPE target_val, const COL_TYPE *col_buf, const int32_t *partition_rowid_buf, const int64_t *ordered_index_buf, const NULL_TYPE null_val, const bool nulls_first, const int64_t null_start_offset, const int64_t null_end_offset)
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 647 of file RuntimeFunctions.cpp.

#define DEF_ROUND_NULLABLE (   from_type,
  to_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE to_type \
cast_##from_type##_to_##to_type##_nullable(const from_type operand, \
const from_type from_null_val, \
const to_type to_null_val) { \
return operand == from_null_val \
? to_null_val \
: static_cast<to_type>(operand + (operand < from_type(0) \
? from_type(-0.5) \
: from_type(0.5))); \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 283 of file RuntimeFunctions.cpp.

#define DEF_SAFE_DIV_NULLABLE (   type,
  null_type,
  opname 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type safe_div_##type( \
const type lhs, const type rhs, const null_type null_val) { \
if (lhs != null_val && rhs != null_val && rhs != 0) { \
return lhs / rhs; \
} \
return null_val; \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 109 of file RuntimeFunctions.cpp.

#define DEF_SEARCH_AGGREGATION_TREE (   agg_value_type)

Definition at line 901 of file RuntimeFunctions.cpp.

#define DEF_SEARCH_DERIVED_AGGREGATION_TREE (   agg_value_type)

Definition at line 1019 of file RuntimeFunctions.cpp.

#define DEF_SHARED_AGG_RET_STUBS (   base_agg_func)

Definition at line 1598 of file RuntimeFunctions.cpp.

#define DEF_SHARED_AGG_STUBS (   base_agg_func)
Value:
extern "C" GPU_RT_STUB void base_agg_func##_shared(int64_t* agg, const int64_t val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_skip_val_shared( \
int64_t* agg, const int64_t val, const int64_t skip_val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_int32_shared(int32_t* agg, \
const int32_t val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_int16_shared(int16_t* agg, \
const int16_t val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_int8_shared(int8_t* agg, \
const int8_t val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_int32_skip_val_shared( \
int32_t* agg, const int32_t val, const int32_t skip_val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_double_shared(int64_t* agg, \
const double val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_double_skip_val_shared( \
int64_t* agg, const double val, const double skip_val) {} \
extern "C" GPU_RT_STUB void base_agg_func##_float_shared(int32_t* agg, \
const float val) {} \
\
extern "C" GPU_RT_STUB void base_agg_func##_float_skip_val_shared( \
int32_t* agg, const float val, const float skip_val) {}
#define GPU_RT_STUB

Definition at line 1637 of file RuntimeFunctions.cpp.

#define DEF_SKIP_AGG (   base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void base_agg_func##_skip_val( \
DATA_T* agg, const DATA_T val, const DATA_T skip_val) { \
if (val != skip_val) { \
const DATA_T old_agg = *agg; \
if (old_agg != skip_val) { \
base_agg_func(agg, val); \
} else { \
*agg = val; \
} \
} \
}
#define DATA_T
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 1538 of file RuntimeFunctions.cpp.

#define DEF_SKIP_AGG (   base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void base_agg_func##_skip_val( \
ADDR_T* agg, const DATA_T val, const DATA_T skip_val) { \
if (val != skip_val) { \
const ADDR_T old_agg = *agg; \
if (old_agg != *reinterpret_cast<const ADDR_T*>(may_alias_ptr(&skip_val))) { \
base_agg_func(agg, val); \
} else { \
*agg = *reinterpret_cast<const ADDR_T*>(may_alias_ptr(&val)); \
} \
} \
}
#define DATA_T
#define ADDR_T
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 1538 of file RuntimeFunctions.cpp.

#define DEF_SKIP_AGG_ADD (   base_agg_func)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void base_agg_func##_skip_val( \
DATA_T* agg, const DATA_T val, const DATA_T skip_val) { \
if (val != skip_val) { \
base_agg_func(agg, val); \
} \
}
#define DATA_T
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 1371 of file RuntimeFunctions.cpp.

#define DEF_SKIP_IF_AGG (   skip_agg_func,
  base_agg_func 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void skip_agg_func##_skip_val( \
ADDR_T* agg, const DATA_T val, const DATA_T skip_val, const int8_t cond) { \
if (cond) { \
base_agg_func##_skip_val(agg, val, skip_val); \
} \
}
#define DATA_T
#define ADDR_T
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 1551 of file RuntimeFunctions.cpp.

#define DEF_UMINUS_NULLABLE (   type,
  null_type 
)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE type uminus_##type##_nullable( \
const type operand, const null_type null_val) { \
return operand == null_val ? null_val : -operand; \
}
#define RUNTIME_EXPORT
#define ALWAYS_INLINE

Definition at line 247 of file RuntimeFunctions.cpp.

#define DEF_WRITE_PROJECTION_INT (   n)
Value:
extern "C" RUNTIME_EXPORT ALWAYS_INLINE void write_projection_int##n( \
int8_t* slot_ptr, const int##n##_t val, const int64_t init_val) { \
if (val != init_val) { \
*reinterpret_cast<int##n##_t*>(slot_ptr) = val; \
} \
}
#define RUNTIME_EXPORT
constexpr double n
Definition: Utm.h:38
#define ALWAYS_INLINE

Definition at line 1279 of file RuntimeFunctions.cpp.

#define GPU_RT_STUB   NEVER_INLINE __attribute__((optnone))

Definition at line 381 of file RuntimeFunctions.cpp.

Function Documentation

RUNTIME_EXPORT NEVER_INLINE void agg_approximate_count_distinct ( int64_t *  agg,
const int64_t  key,
const uint32_t  b 
)

Definition at line 394 of file RuntimeFunctions.cpp.

References get_rank(), and MurmurHash64A().

394  {
395  const uint64_t hash = MurmurHash64A(&key, sizeof(key), 0);
396  const uint32_t index = hash >> (64 - b);
397  const uint8_t rank = get_rank(hash << b, 64 - b);
398  uint8_t* M = reinterpret_cast<uint8_t*>(*agg);
399  M[index] = std::max(M[index], rank);
400 }
FORCE_INLINE uint8_t get_rank(uint64_t x, uint32_t b)
RUNTIME_EXPORT NEVER_INLINE DEVICE uint64_t MurmurHash64A(const void *key, int len, uint64_t seed)
Definition: MurmurHash.cpp:27

+ Here is the call graph for this function:

GPU_RT_STUB void agg_approximate_count_distinct_gpu ( int64_t *  ,
const int64_t  ,
const uint32_t  ,
const int64_t  ,
const int64_t   
)

Definition at line 402 of file RuntimeFunctions.cpp.

406  {}
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count ( uint64_t *  agg,
const int64_t   
)

Definition at line 362 of file RuntimeFunctions.cpp.

Referenced by agg_count_skip_val(), and anonymous_namespace{GroupByAndAggregate.cpp}::get_agg_count().

362  {
363  return (*agg)++;
364 }

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap ( int64_t *  agg,
const int64_t  val,
const int64_t  min_val,
const int64_t  bucket_size 
)

Definition at line 366 of file RuntimeFunctions.cpp.

Referenced by agg_count_distinct_bitmap_skip_val(), WindowFunctionContext::fillPartitionEnd(), WindowFunctionContext::fillPartitionStart(), anonymous_namespace{WindowContext.cpp}::index_to_partition_end(), and InValuesBitmap::InValuesBitmap().

370  {
371  uint64_t bitmap_idx = val - min_val;
372  if (1 < bucket_size) {
373  bitmap_idx /= static_cast<uint64_t>(bucket_size);
374  }
375  reinterpret_cast<int8_t*>(*agg)[bitmap_idx >> 3] |= (1 << (bitmap_idx & 7));
376 }

+ Here is the caller graph for this function:

GPU_RT_STUB void agg_count_distinct_bitmap_gpu ( int64_t *  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const uint64_t  ,
const uint64_t   
)

Definition at line 384 of file RuntimeFunctions.cpp.

391  {}
RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap_skip_val ( int64_t *  agg,
const int64_t  val,
const int64_t  min_val,
const int64_t  bucket_size,
const int64_t  skip_val 
)

Definition at line 1179 of file RuntimeFunctions.cpp.

References agg_count_distinct_bitmap().

1184  {
1185  if (val != skip_val) {
1186  agg_count_distinct_bitmap(agg, val, min_val, bucket_size);
1187  }
1188 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_count_distinct_bitmap(int64_t *agg, const int64_t val, const int64_t min_val, const int64_t bucket_size)

+ Here is the call graph for this function:

GPU_RT_STUB void agg_count_distinct_bitmap_skip_val_gpu ( int64_t *  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const int64_t  ,
const uint64_t  ,
const uint64_t   
)

Definition at line 1190 of file RuntimeFunctions.cpp.

1198  {}
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double ( uint64_t *  agg,
const double  val 
)

Definition at line 1417 of file RuntimeFunctions.cpp.

Referenced by agg_count_double_skip_val().

1418  {
1419  return (*agg)++;
1420 }

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double_skip_val ( uint64_t *  agg,
const double  val,
const double  skip_val 
)

Definition at line 1523 of file RuntimeFunctions.cpp.

References agg_count_double().

1523  {
1524  if (val != skip_val) {
1525  return agg_count_double(agg, val);
1526  }
1527  return *agg;
1528 }
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_double(uint64_t *agg, const double val)

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float ( uint32_t *  agg,
const float  val 
)

Definition at line 1470 of file RuntimeFunctions.cpp.

Referenced by agg_count_float_skip_val().

1471  {
1472  return (*agg)++;
1473 }

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float_skip_val ( uint32_t *  agg,
const float  val,
const float  skip_val 
)

Definition at line 1531 of file RuntimeFunctions.cpp.

References agg_count_float().

1531  {
1532  if (val != skip_val) {
1533  return agg_count_float(agg, val);
1534  }
1535  return *agg;
1536 }
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_float(uint32_t *agg, const float val)

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_if ( uint64_t *  agg,
const int64_t  cond 
)

Definition at line 1334 of file RuntimeFunctions.cpp.

Referenced by agg_count_if_skip_val().

1335  {
1336  return cond ? (*agg)++ : *agg;
1337 }

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_if_int32 ( uint32_t *  agg,
const int32_t  cond 
)

Definition at line 1205 of file RuntimeFunctions.cpp.

Referenced by agg_count_if_int32_skip_val().

1206  {
1207  return cond ? (*agg)++ : *agg;
1208 }

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_if_int32_skip_val ( uint32_t *  agg,
const int32_t  cond,
const int32_t  skip_val 
)

Definition at line 1364 of file RuntimeFunctions.cpp.

References agg_count_if_int32().

1364  {
1365  if (cond != skip_val) {
1366  return agg_count_if_int32(agg, cond);
1367  }
1368  return *agg;
1369 }
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_if_int32(uint32_t *agg, const int32_t cond)

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_if_skip_val ( uint64_t *  agg,
const int64_t  cond,
const int64_t  skip_val 
)

Definition at line 1348 of file RuntimeFunctions.cpp.

References agg_count_if().

1348  {
1349  if (cond != skip_val) {
1350  return agg_count_if(agg, cond);
1351  }
1352  return *agg;
1353 }
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_if(uint64_t *agg, const int64_t cond)

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32 ( uint32_t *  agg,
const int32_t   
)

Definition at line 1200 of file RuntimeFunctions.cpp.

Referenced by agg_count_int32_skip_val().

1201  {
1202  return (*agg)++;
1203 }

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32_skip_val ( uint32_t *  agg,
const int32_t  val,
const int32_t  skip_val 
)

Definition at line 1356 of file RuntimeFunctions.cpp.

References agg_count_int32().

1356  {
1357  if (val != skip_val) {
1358  return agg_count_int32(agg, val);
1359  }
1360  return *agg;
1361 }
RUNTIME_EXPORT ALWAYS_INLINE uint32_t agg_count_int32(uint32_t *agg, const int32_t)

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count_skip_val ( uint64_t *  agg,
const int64_t  val,
const int64_t  skip_val 
)

Definition at line 1340 of file RuntimeFunctions.cpp.

References agg_count().

1340  {
1341  if (val != skip_val) {
1342  return agg_count(agg, val);
1343  }
1344  return *agg;
1345 }
RUNTIME_EXPORT ALWAYS_INLINE uint64_t agg_count(uint64_t *agg, const int64_t)

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE void agg_id ( int64_t *  agg,
const int64_t  val 
)

Definition at line 1148 of file RuntimeFunctions.cpp.

1148  {
1149  *agg = val;
1150 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_id_double ( int64_t *  agg,
const double  val 
)

Definition at line 1448 of file RuntimeFunctions.cpp.

1449  {
1450  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&val)));
1451 }
GPU_RT_STUB void agg_id_double_shared_slow ( int64_t *  agg,
const double *  val 
)

Definition at line 1729 of file RuntimeFunctions.cpp.

1729 {}
RUNTIME_EXPORT ALWAYS_INLINE void agg_id_float ( int32_t *  agg,
const float  val 
)

Definition at line 1501 of file RuntimeFunctions.cpp.

1501  {
1502  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&val)));
1503 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t* agg_id_varlen ( int8_t *  varlen_buffer,
const int64_t  offset,
const int8_t *  value,
const int64_t  size_bytes 
)

Definition at line 1152 of file RuntimeFunctions.cpp.

1155  {
1156  for (auto i = 0; i < size_bytes; i++) {
1157  varlen_buffer[offset + i] = value[i];
1158  }
1159  return &varlen_buffer[offset];
1160 }
GPU_RT_STUB int8_t* agg_id_varlen_shared ( int8_t *  varlen_buffer,
const int64_t  offset,
const int8_t *  value,
const int64_t  size_bytes 
)

Definition at line 1669 of file RuntimeFunctions.cpp.

1672  {
1673  return nullptr;
1674 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_if_sum_float ( int32_t *  agg,
const float  val,
const int8_t  cond 
)

Definition at line 1481 of file RuntimeFunctions.cpp.

References agg_sum_float().

1483  {
1484  if (cond) {
1485  agg_sum_float(agg, val);
1486  }
1487 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_float(int32_t *agg, const float val)

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE void agg_max ( int64_t *  agg,
const int64_t  val 
)

Definition at line 1140 of file RuntimeFunctions.cpp.

1140  {
1141  *agg = std::max(*agg, val);
1142 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_max_double ( int64_t *  agg,
const double  val 
)

Definition at line 1436 of file RuntimeFunctions.cpp.

1437  {
1438  const auto r = std::max(*reinterpret_cast<const double*>(agg), val);
1439  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&r)));
1440 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_max_float ( int32_t *  agg,
const float  val 
)

Definition at line 1489 of file RuntimeFunctions.cpp.

1490  {
1491  const auto r = std::max(*reinterpret_cast<const float*>(agg), val);
1492  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&r)));
1493 }
GPU_RT_STUB void agg_max_int16_skip_val_shared ( int16_t *  agg,
const int16_t  val,
const int16_t  skip_val 
)

Definition at line 1713 of file RuntimeFunctions.cpp.

1715  {}
GPU_RT_STUB void agg_max_int8_skip_val_shared ( int8_t *  agg,
const int8_t  val,
const int8_t  skip_val 
)

Definition at line 1717 of file RuntimeFunctions.cpp.

1719  {}
RUNTIME_EXPORT ALWAYS_INLINE void agg_min ( int64_t *  agg,
const int64_t  val 
)

Definition at line 1144 of file RuntimeFunctions.cpp.

1144  {
1145  *agg = std::min(*agg, val);
1146 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_min_double ( int64_t *  agg,
const double  val 
)

Definition at line 1442 of file RuntimeFunctions.cpp.

1443  {
1444  const auto r = std::min(*reinterpret_cast<const double*>(agg), val);
1445  *agg = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&r)));
1446 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_min_float ( int32_t *  agg,
const float  val 
)

Definition at line 1495 of file RuntimeFunctions.cpp.

1496  {
1497  const auto r = std::min(*reinterpret_cast<const float*>(agg), val);
1498  *agg = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&r)));
1499 }
GPU_RT_STUB void agg_min_int16_skip_val_shared ( int16_t *  agg,
const int16_t  val,
const int16_t  skip_val 
)

Definition at line 1721 of file RuntimeFunctions.cpp.

1723  {}
GPU_RT_STUB void agg_min_int8_skip_val_shared ( int8_t *  agg,
const int8_t  val,
const int8_t  skip_val 
)

Definition at line 1725 of file RuntimeFunctions.cpp.

1727  {}
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum ( int64_t *  agg,
const int64_t  val 
)

Definition at line 1128 of file RuntimeFunctions.cpp.

Referenced by agg_sum_if(), and agg_sum_skip_val().

1128  {
1129  const auto old = *agg;
1130  *agg += val;
1131  return old;
1132 }

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_double ( int64_t *  agg,
const double  val 
)

Definition at line 1422 of file RuntimeFunctions.cpp.

Referenced by agg_sum_if_double().

1423  {
1424  const auto r = *reinterpret_cast<const double*>(agg) + val;
1425  *agg = *reinterpret_cast<const int64_t*>(may_alias_ptr(&r));
1426 }

+ Here is the caller graph for this function:

GPU_RT_STUB void agg_sum_double_shared ( int64_t *  agg,
const double  val 
)

Definition at line 1763 of file RuntimeFunctions.cpp.

1763 {}
GPU_RT_STUB void agg_sum_double_skip_val_shared ( int64_t *  agg,
const double  val,
const double  skip_val 
)

Definition at line 1765 of file RuntimeFunctions.cpp.

1767  {}
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_float ( int32_t *  agg,
const float  val 
)

Definition at line 1475 of file RuntimeFunctions.cpp.

Referenced by agg_if_sum_float().

1476  {
1477  const auto r = *reinterpret_cast<const float*>(agg) + val;
1478  *agg = *reinterpret_cast<const int32_t*>(may_alias_ptr(&r));
1479 }

+ Here is the caller graph for this function:

GPU_RT_STUB void agg_sum_float_shared ( int32_t *  agg,
const float  val 
)

Definition at line 1768 of file RuntimeFunctions.cpp.

1768 {}
GPU_RT_STUB void agg_sum_float_skip_val_shared ( int32_t *  agg,
const float  val,
const float  skip_val 
)

Definition at line 1770 of file RuntimeFunctions.cpp.

1772  {}
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum_if ( int64_t *  agg,
const int64_t  val,
const int8_t  cond 
)

Definition at line 1134 of file RuntimeFunctions.cpp.

References agg_sum().

1136  {
1137  return cond ? agg_sum(agg, val) : *agg;
1138 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum(int64_t *agg, const int64_t val)

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_if_double ( int64_t *  agg,
const double  val,
const int8_t  cond 
)

Definition at line 1428 of file RuntimeFunctions.cpp.

References agg_sum_double().

1430  {
1431  if (cond) {
1432  agg_sum_double(agg, val);
1433  }
1434 }
RUNTIME_EXPORT ALWAYS_INLINE void agg_sum_double(int64_t *agg, const double val)

+ Here is the call graph for this function:

GPU_RT_STUB void agg_sum_if_double_shared ( int64_t *  agg,
const double  val,
const int8_t  cond 
)

Definition at line 1787 of file RuntimeFunctions.cpp.

1789  {}
GPU_RT_STUB void agg_sum_if_double_skip_val_shared ( int64_t *  agg,
const double  val,
const double  skip_val,
const int8_t  cond 
)

Definition at line 1791 of file RuntimeFunctions.cpp.

1794  {}
GPU_RT_STUB void agg_sum_if_float_shared ( int32_t *  agg,
const float  val,
const int8_t  cond 
)

Definition at line 1795 of file RuntimeFunctions.cpp.

1797  {}
GPU_RT_STUB void agg_sum_if_float_skip_val_shared ( int32_t *  agg,
const float  val,
const float  skip_val,
const int8_t  cond 
)

Definition at line 1799 of file RuntimeFunctions.cpp.

1802  {}
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_if_int32 ( int32_t *  agg,
const int32_t  val,
const int8_t  cond 
)

Definition at line 1217 of file RuntimeFunctions.cpp.

References agg_sum_int32().

1219  {
1220  return cond ? agg_sum_int32(agg, val) : *agg;
1221 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32(int32_t *agg, const int32_t val)

+ Here is the call graph for this function:

GPU_RT_STUB int32_t agg_sum_if_int32_shared ( int32_t *  agg,
const int32_t  val,
const int8_t  cond 
)

Definition at line 1774 of file RuntimeFunctions.cpp.

1776  {
1777  return 0;
1778 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_if_int32_skip_val ( int32_t *  agg,
const int32_t  val,
const int32_t  skip_val,
const int8_t  cond 
)

Definition at line 1327 of file RuntimeFunctions.cpp.

References agg_sum_int32_skip_val().

1330  {
1331  return cond ? agg_sum_int32_skip_val(agg, val, skip_val) : *agg;
1332 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32_skip_val(int32_t *agg, const int32_t val, const int32_t skip_val)

+ Here is the call graph for this function:

GPU_RT_STUB int32_t agg_sum_if_int32_skip_val_shared ( int32_t *  agg,
const int32_t  val,
const int32_t  skip_val,
const int8_t  cond 
)

Definition at line 1780 of file RuntimeFunctions.cpp.

1783  {
1784  return 0;
1785 }
GPU_RT_STUB int64_t agg_sum_if_shared ( int64_t *  agg,
const int64_t  val,
const int8_t  cond 
)

Definition at line 1735 of file RuntimeFunctions.cpp.

1737  {
1738  return 0;
1739 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum_if_skip_val ( int64_t *  agg,
const int64_t  val,
const int64_t  skip_val,
const int8_t  cond 
)

Definition at line 1319 of file RuntimeFunctions.cpp.

References agg_sum_skip_val().

1322  {
1323  return cond ? agg_sum_skip_val(agg, val, skip_val) : *agg;
1324 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum_skip_val(int64_t *agg, const int64_t val, const int64_t skip_val)

+ Here is the call graph for this function:

GPU_RT_STUB int64_t agg_sum_if_skip_val_shared ( int64_t *  agg,
const int64_t  val,
const int64_t  skip_val,
const int8_t  cond 
)

Definition at line 1747 of file RuntimeFunctions.cpp.

1750  {
1751  return 0;
1752 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32 ( int32_t *  agg,
const int32_t  val 
)

Definition at line 1210 of file RuntimeFunctions.cpp.

Referenced by agg_sum_if_int32(), and agg_sum_int32_skip_val().

1211  {
1212  const auto old = *agg;
1213  *agg += val;
1214  return old;
1215 }

+ Here is the caller graph for this function:

GPU_RT_STUB int32_t agg_sum_int32_shared ( int32_t *  agg,
const int32_t  val 
)

Definition at line 1753 of file RuntimeFunctions.cpp.

1753  {
1754  return 0;
1755 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32_skip_val ( int32_t *  agg,
const int32_t  val,
const int32_t  skip_val 
)

Definition at line 1306 of file RuntimeFunctions.cpp.

References agg_sum_int32().

Referenced by agg_sum_if_int32_skip_val().

1306  {
1307  const auto old = *agg;
1308  if (val != skip_val) {
1309  if (old != skip_val) {
1310  return agg_sum_int32(agg, val);
1311  } else {
1312  *agg = val;
1313  }
1314  }
1315  return old;
1316 }
RUNTIME_EXPORT ALWAYS_INLINE int32_t agg_sum_int32(int32_t *agg, const int32_t val)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

GPU_RT_STUB int32_t agg_sum_int32_skip_val_shared ( int32_t *  agg,
const int32_t  val,
const int32_t  skip_val 
)

Definition at line 1757 of file RuntimeFunctions.cpp.

1759  {
1760  return 0;
1761 }
GPU_RT_STUB int64_t agg_sum_shared ( int64_t *  agg,
const int64_t  val 
)

Definition at line 1731 of file RuntimeFunctions.cpp.

1731  {
1732  return 0;
1733 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum_skip_val ( int64_t *  agg,
const int64_t  val,
const int64_t  skip_val 
)

Definition at line 1291 of file RuntimeFunctions.cpp.

References agg_sum().

Referenced by agg_sum_if_skip_val(), and Executor::reduceResults().

1293  {
1294  const auto old = *agg;
1295  if (val != skip_val) {
1296  if (old != skip_val) {
1297  return agg_sum(agg, val);
1298  } else {
1299  *agg = val;
1300  }
1301  }
1302  return old;
1303 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t agg_sum(int64_t *agg, const int64_t val)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

GPU_RT_STUB int64_t agg_sum_skip_val_shared ( int64_t *  agg,
const int64_t  val,
const int64_t  skip_val 
)

Definition at line 1741 of file RuntimeFunctions.cpp.

1743  {
1744  return 0;
1745 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t bit_is_set ( const int8_t *  bitset,
const int64_t  val,
const int64_t  min_val,
const int64_t  max_val,
const int64_t  null_val,
const int8_t  null_bool_val 
)

Definition at line 408 of file RuntimeFunctions.cpp.

413  {
414  if (val == null_val) {
415  return null_bool_val;
416  }
417  if (val < min_val || val > max_val) {
418  return 0;
419  }
420  if (!bitset) {
421  return 0;
422  }
423  const uint64_t bitmap_idx = val - min_val;
424  return bitset[bitmap_idx >> 3] & (1 << (bitmap_idx & 7)) ? 1 : 0;
425 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t char_length ( const char *  str,
const int32_t  str_len 
)

Definition at line 2136 of file RuntimeFunctions.cpp.

Referenced by ScalarExprVisitor< std::set< shared::TableKey > >::visit().

2136  {
2137  return str_len;
2138 }

+ Here is the caller graph for this function:

// Null-aware CHAR_LENGTH: a null string pointer maps to the integer null
// sentinel, otherwise the stored length is returned unchanged.
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t char_length_nullable(const char* str,
                                                                 const int32_t str_len,
                                                                 const int32_t int_null) {
  return str ? str_len : int_null;
}
// Polls the process-wide interrupt flag; true means the running query has
// been asked to abort.
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool check_interrupt() {
  return check_interrupt_init(static_cast<unsigned>(INT_CHECK));
}
RUNTIME_EXPORT bool check_interrupt_init(unsigned command)

+ Here is the call graph for this function:

// Single entry point guarding the process-wide interrupt flag.
// INT_CHECK polls it, INT_ABORT raises it, INT_RESET clears it; only an
// INT_CHECK on a raised flag returns true.
RUNTIME_EXPORT bool check_interrupt_init(unsigned command) {
  static std::atomic_bool runtime_interrupt_flag{false};

  switch (command) {
    case static_cast<unsigned>(INT_CHECK):
      return runtime_interrupt_flag.load();
    case static_cast<unsigned>(INT_ABORT):
      runtime_interrupt_flag.store(true);
      return false;
    case static_cast<unsigned>(INT_RESET):
      runtime_interrupt_flag.store(false);
      return false;
    default:
      return false;
  }
}
__device__ int32_t runtime_interrupt_flag
Definition: cuda_mapd_rt.cu:95

+ Here is the caller graph for this function:

// SINGLE_VALUE aggregate: succeeds (returns 0) while every non-null input
// agrees with the stored value; a second distinct value yields error code 15
// (see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES).
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id(int64_t* agg,
                                                           const int64_t val,
                                                           const int64_t null_val) {
  if (val == null_val || *agg == val) {
    return 0;
  }
  if (*agg == null_val) {
    *agg = val;  // first real value claims the slot
    return 0;
  }
  return 15;
}
// SINGLE_VALUE aggregate over doubles; the slot stores the double's raw bit
// pattern in an int64_t, so comparisons are done on the punned bits.
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_double(int64_t* agg,
                                                                  const double val,
                                                                  const double null_val) {
  if (val == null_val) {
    return 0;
  }
  const auto val_bits = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&val)));
  const auto null_bits = *(reinterpret_cast<const int64_t*>(may_alias_ptr(&null_val)));
  if (*agg == val_bits) {
    return 0;
  }
  if (*agg == null_bits) {
    *agg = val_bits;
    return 0;
  }
  // see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES
  return 15;
}
// Placeholder so the CPU build links; the real implementation is GPU-only.
GPU_RT_STUB int32_t checked_single_agg_id_double_shared(int64_t* agg,
                                                        const double val,
                                                        const double null_val) {
  return 0;
}
// SINGLE_VALUE aggregate over floats; the slot stores the float's raw bit
// pattern in an int32_t, so comparisons are done on the punned bits.
RUNTIME_EXPORT ALWAYS_INLINE int32_t checked_single_agg_id_float(int32_t* agg,
                                                                 const float val,
                                                                 const float null_val) {
  if (val == null_val) {
    return 0;
  }
  const auto val_bits = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&val)));
  const auto null_bits = *(reinterpret_cast<const int32_t*>(may_alias_ptr(&null_val)));
  if (*agg == val_bits) {
    return 0;
  }
  if (*agg == null_bits) {
    *agg = val_bits;
    return 0;
  }
  // see Execute::ERR_SINGLE_VALUE_FOUND_MULTIPLE_VALUES
  return 15;
}
// Placeholder so the CPU build links; the real implementation is GPU-only.
GPU_RT_STUB int32_t checked_single_agg_id_float_shared(int32_t* agg,
                                                       const float val,
                                                       const float null_val) {
  return 0;
}
// Placeholder so the CPU build links; the real implementation is GPU-only.
GPU_RT_STUB int32_t checked_single_agg_id_int16_shared(int16_t* agg,
                                                       const int16_t val,
                                                       const int16_t null_val) {
  return 0;
}
// Placeholder so the CPU build links; the real implementation is GPU-only.
GPU_RT_STUB int32_t checked_single_agg_id_int32_shared(int32_t* agg,
                                                       const int32_t val,
                                                       const int32_t null_val) {
  return 0;
}
// Placeholder so the CPU build links; the real implementation is GPU-only.
GPU_RT_STUB int32_t checked_single_agg_id_int8_shared(int8_t* agg,
                                                      const int8_t val,
                                                      const int8_t null_val) {
  return 0;
}
// Placeholder so the CPU build links; the real implementation is GPU-only.
GPU_RT_STUB int32_t checked_single_agg_id_shared(int64_t* agg,
                                                 const int64_t val,
                                                 const int64_t null_val) {
  return 0;
}
template<typename T , typename Comparator >
int64_t compute_current_row_idx_in_frame ( const int64_t  num_elems,
const int64_t  cur_row_idx,
const T *  col_buf,
const int32_t *  partition_rowid_buf,
const int64_t *  ordered_index_buf,
const T  null_val,
const bool  nulls_first,
const int64_t  null_start_pos,
const int64_t  null_end_pos,
Comparator  cmp 
)
inline

Definition at line 457 of file RuntimeFunctions.cpp.

References get_valid_buf_end_pos(), and get_valid_buf_start_pos().

466  {
467  const auto target_value = col_buf[cur_row_idx];
468  if (target_value == null_val) {
469  for (int64_t target_offset = null_start_pos; target_offset < null_end_pos;
470  target_offset++) {
471  const auto candidate_offset = partition_rowid_buf[ordered_index_buf[target_offset]];
472  if (candidate_offset == cur_row_idx) {
473  return target_offset;
474  }
475  }
476  }
477  auto const modified_null_end_pos = nulls_first ? null_end_pos - 1 : null_end_pos;
478  int64_t l = get_valid_buf_start_pos(null_start_pos, modified_null_end_pos);
479  int64_t h = get_valid_buf_end_pos(num_elems, null_start_pos, modified_null_end_pos);
480  while (l < h) {
481  int64_t mid = l + (h - l) / 2;
482  auto const target_row_idx = partition_rowid_buf[ordered_index_buf[mid]];
483  auto const cur_value = col_buf[target_row_idx];
484  if (cmp(target_value, cur_value)) {
485  h = mid;
486  } else {
487  l = mid + 1;
488  }
489  }
490  int64_t target_offset = l;
491  int64_t candidate_row_idx = partition_rowid_buf[ordered_index_buf[target_offset]];
492  while (col_buf[candidate_row_idx] == target_value && target_offset < num_elems) {
493  if (candidate_row_idx == cur_row_idx) {
494  return target_offset;
495  }
496  candidate_row_idx = partition_rowid_buf[ordered_index_buf[++target_offset]];
497  }
498  return -1;
499 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_start_pos(const int64_t null_start_pos, const int64_t null_end_pos)
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_end_pos(const int64_t num_elems, const int64_t null_start_pos, const int64_t null_end_pos)

+ Here is the call graph for this function:

template<typename AGG_VALUE_TYPE >
void compute_derived_aggregates ( SumAndCountPair< AGG_VALUE_TYPE > *  aggregation_tree_for_partition,
SumAndCountPair< AGG_VALUE_TYPE > &  res,
size_t  query_range_start_idx,
size_t  query_range_end_idx,
size_t  leaf_level,
size_t  tree_fanout,
AGG_VALUE_TYPE  invalid_val,
AGG_VALUE_TYPE  null_val 
)
inline

Definition at line 956 of file RuntimeFunctions.cpp.

References SumAndCountPair< T >::count, getStartOffsetForSegmentTreeTraversal(), and SumAndCountPair< T >::sum.

964  {
965  size_t leaf_start_idx = getStartOffsetForSegmentTreeTraversal(leaf_level, tree_fanout);
966  size_t begin = leaf_start_idx + query_range_start_idx;
967  size_t end = leaf_start_idx + query_range_end_idx;
968  SumAndCountPair<AGG_VALUE_TYPE> null_res{null_val, 0};
969  SumAndCountPair<AGG_VALUE_TYPE> invalid_res{invalid_val, 0};
970  bool all_nulls = true;
971  for (int level = leaf_level; level >= 0; level--) {
972  size_t parentBegin = begin / tree_fanout;
973  size_t parentEnd = (end - 1) / tree_fanout;
974  if (parentBegin == parentEnd) {
975  for (size_t pos = begin; pos < end; pos++) {
976  if (aggregation_tree_for_partition[pos].sum != null_val) {
977  all_nulls = false;
978  res.sum += aggregation_tree_for_partition[pos].sum;
979  res.count += aggregation_tree_for_partition[pos].count;
980  }
981  }
982  if (all_nulls) {
983  res = null_res;
984  }
985  return;
986  } else if (parentBegin > parentEnd) {
987  res = null_res;
988  return;
989  }
990  size_t group_begin = (parentBegin * tree_fanout) + 1;
991  if (begin != group_begin) {
992  size_t limit = (parentBegin * tree_fanout) + tree_fanout + 1;
993  for (size_t pos = begin; pos < limit; pos++) {
994  if (aggregation_tree_for_partition[pos].sum != null_val) {
995  all_nulls = false;
996  res.sum += aggregation_tree_for_partition[pos].sum;
997  res.count += aggregation_tree_for_partition[pos].count;
998  }
999  }
1000  parentBegin++;
1001  }
1002  size_t group_end = (parentEnd * tree_fanout) + 1;
1003  if (end != group_end) {
1004  for (size_t pos = group_end; pos < end; pos++) {
1005  if (aggregation_tree_for_partition[pos].sum != null_val) {
1006  all_nulls = false;
1007  res.sum += aggregation_tree_for_partition[pos].sum;
1008  res.count += aggregation_tree_for_partition[pos].count;
1009  }
1010  }
1011  }
1012  begin = parentBegin;
1013  end = parentEnd;
1014  }
1015  res = invalid_res;
1016  return;
1017 }
RUNTIME_EXPORT ALWAYS_INLINE size_t getStartOffsetForSegmentTreeTraversal(size_t level, size_t tree_fanout)

+ Here is the call graph for this function:

// Binary search over a sorted int64 buffer.
// NOTE(review): despite the name, equal elements move the search right
// (target >= col_buf[mid] advances `lo`), i.e. this behaves like an upper
// bound clamped to entry_cnt - 1 -- confirm against callers before renaming.
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_int64_t_lower_bound(const int64_t entry_cnt,
                                                                 const int64_t target_value,
                                                                 const int64_t* col_buf) {
  int64_t lo = 0;
  int64_t hi = entry_cnt - 1;
  while (lo < hi) {
    const int64_t mid = lo + (hi - lo) / 2;
    if (target_value < col_buf[mid]) {
      hi = mid;
    } else {
      lo = mid + 1;
    }
  }
  return lo;
}
template<typename TARGET_VAL_TYPE , typename COL_TYPE , typename NULL_TYPE >
int64_t compute_lower_bound_from_ordered_partition_index ( const int64_t  num_elems,
const TARGET_VAL_TYPE  target_val,
const COL_TYPE *  col_buf,
const int32_t *  partition_rowid_buf,
const int64_t *  ordered_index_buf,
const NULL_TYPE  null_val,
const bool  nulls_first,
const int64_t  null_start_offset,
const int64_t  null_end_offset 
)
inline

Definition at line 539 of file RuntimeFunctions.cpp.

References get_valid_buf_end_pos(), and get_valid_buf_start_pos().

548  {
549  if (target_val == null_val) {
550  return null_start_offset;
551  }
552  auto const modified_null_end_pos = nulls_first ? null_end_offset - 1 : null_end_offset;
553  int64_t l = get_valid_buf_start_pos(null_start_offset, modified_null_end_pos);
554  int64_t h = get_valid_buf_end_pos(num_elems, null_start_offset, modified_null_end_pos);
555  while (l < h) {
556  int64_t mid = l + (h - l) / 2;
557  if (target_val <= col_buf[partition_rowid_buf[ordered_index_buf[mid]]]) {
558  h = mid;
559  } else {
560  l = mid + 1;
561  }
562  }
563  return l;
564 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_start_pos(const int64_t null_start_pos, const int64_t null_end_pos)
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_end_pos(const int64_t num_elems, const int64_t null_start_pos, const int64_t null_end_pos)

+ Here is the call graph for this function:

// ROWS-mode frame end for "N FOLLOWING": exclusive end index, clamped to the
// partition size.
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_end_index_add(
    int64_t candidate_index,
    int64_t current_partition_start_offset,
    int64_t frame_bound,
    int64_t num_current_partition_elem) {
  const int64_t idx = candidate_index - current_partition_start_offset + frame_bound;
  return idx >= num_current_partition_elem ? num_current_partition_elem : idx + 1;
}
// ROWS-mode frame end for "N PRECEDING": exclusive end index, clamped at 0.
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_end_index_sub(
    int64_t candidate_index,
    int64_t current_partition_start_offset,
    int64_t frame_bound) {
  const int64_t idx = candidate_index - current_partition_start_offset - frame_bound;
  return idx < 0 ? 0 : idx + 1;
}
// ROWS-mode frame start for "N FOLLOWING": inclusive start index, clamped to
// the partition size.
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_start_index_add(
    int64_t candidate_index,
    int64_t current_partition_start_offset,
    int64_t frame_bound,
    int64_t num_current_partition_elem) {
  const int64_t idx = candidate_index - current_partition_start_offset + frame_bound;
  return idx >= num_current_partition_elem ? num_current_partition_elem : idx;
}
// ROWS-mode frame start for "N PRECEDING": inclusive start index, clamped at 0.
RUNTIME_EXPORT ALWAYS_INLINE int64_t compute_row_mode_start_index_sub(
    int64_t candidate_index,
    int64_t current_partition_start_offset,
    int64_t frame_bound) {
  const int64_t idx = candidate_index - current_partition_start_offset - frame_bound;
  return idx < 0 ? 0 : idx;
}
template<typename TARGET_VAL_TYPE , typename COL_TYPE , typename NULL_TYPE >
int64_t compute_upper_bound_from_ordered_partition_index ( const int64_t  num_elems,
const TARGET_VAL_TYPE  target_val,
const COL_TYPE *  col_buf,
const int32_t *  partition_rowid_buf,
const int64_t *  ordered_index_buf,
const NULL_TYPE  null_val,
const bool  nulls_first,
const int64_t  null_start_offset,
const int64_t  null_end_offset 
)
inline

Definition at line 620 of file RuntimeFunctions.cpp.

References get_valid_buf_end_pos(), and get_valid_buf_start_pos().

629  {
630  if (target_val == null_val) {
631  return null_end_offset;
632  }
633  auto const modified_null_end_pos = nulls_first ? null_end_offset - 1 : null_end_offset;
634  int64_t l = get_valid_buf_start_pos(null_start_offset, modified_null_end_pos);
635  int64_t h = get_valid_buf_end_pos(num_elems, null_start_offset, modified_null_end_pos);
636  while (l < h) {
637  int64_t mid = l + (h - l) / 2;
638  if (target_val >= col_buf[partition_rowid_buf[ordered_index_buf[mid]]]) {
639  l = mid + 1;
640  } else {
641  h = mid;
642  }
643  }
644  return l;
645 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_start_pos(const int64_t null_start_pos, const int64_t null_end_pos)
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_end_pos(const int64_t num_elems, const int64_t null_start_pos, const int64_t null_end_pos)

+ Here is the call graph for this function:

template<AggFuncType AGG_FUNC_TYPE, typename AGG_TYPE >
AGG_TYPE compute_window_func_via_aggregation_tree ( AGG_TYPE *  aggregation_tree_for_partition,
size_t  query_range_start_idx,
size_t  query_range_end_idx,
size_t  leaf_level,
size_t  tree_fanout,
AGG_TYPE  init_val,
AGG_TYPE  invalid_val,
AGG_TYPE  null_val 
)
inline

Definition at line 847 of file RuntimeFunctions.cpp.

References getStartOffsetForSegmentTreeTraversal(), and run_benchmark_import::res.

855  {
856  size_t leaf_start_idx = getStartOffsetForSegmentTreeTraversal(leaf_level, tree_fanout);
857  size_t begin = leaf_start_idx + query_range_start_idx;
858  size_t end = leaf_start_idx + query_range_end_idx;
859  AGG_TYPE res = init_val;
860  bool all_nulls = true;
861  for (int level = leaf_level; level >= 0; level--) {
862  size_t parentBegin = begin / tree_fanout;
863  size_t parentEnd = (end - 1) / tree_fanout;
864  if (parentBegin == parentEnd) {
865  for (size_t pos = begin; pos < end; pos++) {
866  if (aggregation_tree_for_partition[pos] != null_val) {
867  all_nulls = false;
868  res = agg_func<AGG_FUNC_TYPE>(res, aggregation_tree_for_partition[pos]);
869  }
870  }
871  return all_nulls ? null_val : res;
872  } else if (parentBegin > parentEnd) {
873  return null_val;
874  }
875  size_t group_begin = (parentBegin * tree_fanout) + 1;
876  if (begin != group_begin) {
877  size_t limit = (parentBegin * tree_fanout) + tree_fanout + 1;
878  for (size_t pos = begin; pos < limit; pos++) {
879  if (aggregation_tree_for_partition[pos] != null_val) {
880  all_nulls = false;
881  res = agg_func<AGG_FUNC_TYPE>(res, aggregation_tree_for_partition[pos]);
882  }
883  }
884  parentBegin++;
885  }
886  size_t group_end = (parentEnd * tree_fanout) + 1;
887  if (end != group_end) {
888  for (size_t pos = group_end; pos < end; pos++) {
889  if (aggregation_tree_for_partition[pos] != null_val) {
890  all_nulls = false;
891  res = agg_func<AGG_FUNC_TYPE>(res, aggregation_tree_for_partition[pos]);
892  }
893  }
894  }
895  begin = parentBegin;
896  end = parentEnd;
897  }
898  return invalid_val;
899 }
RUNTIME_EXPORT ALWAYS_INLINE size_t getStartOffsetForSegmentTreeTraversal(size_t level, size_t tree_fanout)

+ Here is the call graph for this function:

// Ceiling of a scaled decimal: floor plus one scale step whenever x is not
// already an exact multiple of `scale`.
RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_ceil(const int64_t x, const int64_t scale) {
  const int64_t bump = (x % scale) ? scale : 0;
  return decimal_floor(x, scale) + bump;
}
RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_floor(const int64_t x, const int64_t scale)

+ Here is the call graph for this function:

// Rounds a scaled decimal toward negative infinity to a multiple of `scale`.
RUNTIME_EXPORT ALWAYS_INLINE int64_t decimal_floor(const int64_t x, const int64_t scale) {
  const int64_t truncated = x / scale * scale;  // C++ division truncates toward zero
  if (x >= 0 || truncated == x) {
    return truncated;
  }
  // Negative non-multiples must round one full step further down.
  return truncated - scale;
}

+ Here is the caller graph for this function:

// Placeholder so the CPU build links; dynamic shared memory is GPU-only.
GPU_RT_STUB int64_t* declare_dynamic_shared_memory() {
  return nullptr;
}
// Re-encodes a decoded date value by `multiplier`; the null sentinel passes
// through untouched.
RUNTIME_EXPORT ALWAYS_INLINE int64_t encode_date(int64_t decoded_val,
                                                 int64_t null_val,
                                                 int64_t multiplier) {
  if (decoded_val == null_val) {
    return decoded_val;
  }
  return decoded_val * multiplier;
}
// LAG/LEAD-style gap filling: resolves the physical row for sorted position
// `cur_idx`; if its value is the null sentinel, substitutes the nearest
// non-null value scanning backward (forward fill) or forward (backward fill)
// through the sorted index. Returns the (possibly still null) current value
// when no non-null neighbor exists.
template <typename T>
T fill_missing_value(int64_t const cur_idx,
                     T const null_val,
                     T* const col_buf,
                     int64_t const num_elems_in_partition,
                     int32_t* const partition_rowid_buf,
                     int64_t* const ordered_index_buf,
                     bool const is_forward_fill) {
  T const cur_val = col_buf[partition_rowid_buf[ordered_index_buf[cur_idx]]];
  if (cur_val != null_val) {
    return cur_val;
  }
  int64_t const step = is_forward_fill ? -1 : 1;
  int64_t const stop = is_forward_fill ? -1 : num_elems_in_partition;
  for (int64_t probe = cur_idx + step; probe != stop; probe += step) {
    T const candidate = col_buf[partition_rowid_buf[ordered_index_buf[probe]]];
    if (candidate != null_val) {
      return candidate;
    }
  }
  return cur_val;  // no non-null neighbor: keep the null
}
// Floor division: C++ integer division truncates toward zero, so negative
// dividends are biased down by (divisor - 1) to round toward negative
// infinity instead.
RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_lhs(const int64_t dividend,
                                                   const int64_t divisor) {
  const int64_t biased = dividend < 0 ? dividend - (divisor - 1) : dividend;
  return biased / divisor;
}

+ Here is the caller graph for this function:

// Null-aware floor division: a null dividend propagates unchanged.
RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_nullable_lhs(const int64_t dividend,
                                                            const int64_t divisor,
                                                            const int64_t null_val) {
  if (dividend == null_val) {
    return null_val;
  }
  return floor_div_lhs(dividend, divisor);
}
RUNTIME_EXPORT ALWAYS_INLINE int64_t floor_div_lhs(const int64_t dividend, const int64_t divisor)

+ Here is the call graph for this function:

// Placeholder so the CPU build links; synchronization is GPU-only.
GPU_RT_STUB void force_sync() {}
// Placeholder so the CPU build links; block indices exist only on the GPU.
GPU_RT_STUB int64_t get_block_index() {
  return 0;
}
// Aggregation-tree pointers are stored type-erased as int64_t*; reinterpret
// the table for double-typed aggregates and pick the partition's tree.
RUNTIME_EXPORT ALWAYS_INLINE double* get_double_aggregation_tree(
    int64_t** aggregation_trees,
    size_t partition_idx) {
  auto typed_trees = reinterpret_cast<double**>(aggregation_trees);
  return typed_trees[partition_idx];
}
// Same type-erased table, reinterpreted for derived (sum, count) aggregates
// over doubles.
RUNTIME_EXPORT ALWAYS_INLINE SumAndCountPair<double>* get_double_derived_aggregation_tree(
    int64_t** aggregation_trees,
    size_t partition_idx) {
  auto typed_trees = reinterpret_cast<SumAndCountPair<double>**>(aggregation_trees);
  return typed_trees[partition_idx];
}
// Each execution position owns an error cell; read the one belonging to this
// thread's start position.
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_error_code(int32_t* error_codes) {
  return error_codes[pos_start_impl(nullptr)];
}
__device__ int32_t pos_start_impl(const int32_t *row_index_resume)
Definition: cuda_mapd_rt.cu:27

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

// Perfect-hash fast path: the key's offset from min_key directly indexes the
// row-wise output buffer.
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_group_value_fast_keyless(
    int64_t* groups_buffer,
    const int64_t key,
    const int64_t min_key,
    const int64_t /* unused */,
    const uint32_t row_size_quad) {
  return groups_buffer + row_size_quad * (key - min_key);
}
// Warp-private variant of the keyless fast path: each warp lane gets its own
// copy of the row, interleaved by warp_size.
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_group_value_fast_keyless_semiprivate(
    int64_t* groups_buffer,
    const int64_t key,
    const int64_t min_key,
    const int64_t /* unused */,
    const uint32_t row_size_quad,
    const uint8_t thread_warp_idx,
    const uint8_t warp_size) {
  return groups_buffer + row_size_quad * (warp_size * (key - min_key) + thread_warp_idx);
}
__device__ int8_t thread_warp_idx(const int8_t warp_sz)
Definition: cuda_mapd_rt.cu:39
// Integer aggregation trees are stored directly; pick the partition's tree.
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_integer_aggregation_tree(
    int64_t** aggregation_trees,
    size_t partition_idx) {
  return aggregation_trees[partition_idx];
}
// Type-erased tree table reinterpreted for derived (sum, count) aggregates
// over integers.
RUNTIME_EXPORT ALWAYS_INLINE SumAndCountPair<int64_t>*
get_integer_derived_aggregation_tree(int64_t** aggregation_trees,
                                     size_t partition_idx) {
  auto typed_trees = reinterpret_cast<SumAndCountPair<int64_t>**>(aggregation_trees);
  return typed_trees[partition_idx];
}
// Row-wise group-by probe: the row at hash slot `h` has layout
// [key columns][padding][agg slots]. An empty row (leading key equal to the
// empty sentinel) is claimed by copying the key in; a row holding the same
// key matches. Either way the returned pointer addresses the 8-byte-aligned
// agg slots; a row holding a different key returns nullptr (collision).
template <typename T>
ALWAYS_INLINE int64_t* get_matching_group_value(int64_t* groups_buffer,
                                                const uint32_t h,
                                                const T* key,
                                                const uint32_t key_count,
                                                const uint32_t row_size_quad) {
  auto typed_row = reinterpret_cast<T*>(groups_buffer + h * row_size_quad);
  const size_t key_bytes = key_count * sizeof(T);
  if (*typed_row == get_empty_key<T>()) {
    memcpy(typed_row, key, key_bytes);
  } else if (memcmp(typed_row, key, key_bytes) != 0) {
    return nullptr;  // slot occupied by a different key
  }
  auto past_key = reinterpret_cast<int8_t*>(typed_row + key_count);
  return reinterpret_cast<int64_t*>(align_to_int64(past_key));
}
FORCE_INLINE HOST DEVICE T align_to_int64(T addr)

+ Here is the call graph for this function:

// Width dispatcher for the typed probe above: compact group keys are stored
// as 4- or 8-byte values.
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value(int64_t* groups_buffer,
                                                               const uint32_t h,
                                                               const int64_t* key,
                                                               const uint32_t key_count,
                                                               const uint32_t key_width,
                                                               const uint32_t row_size_quad) {
  if (key_width == 4) {
    return get_matching_group_value(groups_buffer,
                                    h,
                                    reinterpret_cast<const int32_t*>(key),
                                    key_count,
                                    row_size_quad);
  }
  if (key_width == 8) {
    return get_matching_group_value(groups_buffer, h, key, key_count, row_size_quad);
  }
  return nullptr;  // unsupported key width
}
__device__ int64_t * get_matching_group_value(int64_t *groups_buffer, const uint32_t h, const T *key, const uint32_t key_count, const uint32_t row_size_quad)

+ Here is the call graph for this function:

// Columnar group-by probe: key component i of entry `h` lives at offset
// h + i * entry_count. Claims an empty entry or matches an existing key; in
// both cases the returned pointer is one column stride past the last key
// component. A different key at the slot returns nullptr (collision).
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value_columnar(
    int64_t* groups_buffer,
    const uint32_t h,
    const int64_t* key,
    const uint32_t key_qw_count,
    const size_t entry_count) {
  auto off = h;
  if (groups_buffer[off] == EMPTY_KEY_64) {
    // Empty entry: write the key across the key columns.
    for (size_t i = 0; i < key_qw_count; ++i) {
      groups_buffer[off] = key[i];
      off += entry_count;
    }
    return &groups_buffer[off];
  }
  off = h;
  // Occupied entry: verify every key component.
  for (size_t i = 0; i < key_qw_count; ++i) {
    if (groups_buffer[off] != key[i]) {
      return nullptr;
    }
    off += entry_count;
  }
  return &groups_buffer[off];
}
#define EMPTY_KEY_64
// Columnar probe returning a slot index instead of a pointer: `h` on a
// successful claim or match, -1 on a collision.
template <typename T>
ALWAYS_INLINE int32_t get_matching_group_value_columnar_slot(int64_t* groups_buffer,
                                                             const uint32_t entry_count,
                                                             const uint32_t h,
                                                             const T* key,
                                                             const uint32_t key_count) {
  auto key_columns = reinterpret_cast<T*>(groups_buffer);
  auto off = h;
  if (key_columns[off] == get_empty_key<T>()) {
    // Empty entry: write the key across the key columns and claim the slot.
    for (size_t i = 0; i < key_count; ++i) {
      key_columns[off] = key[i];
      off += entry_count;
    }
    return h;
  }
  off = h;
  // Occupied entry: verify every key component.
  for (size_t i = 0; i < key_count; ++i) {
    if (key_columns[off] != key[i]) {
      return -1;
    }
    off += entry_count;
  }
  return h;
}
// Width dispatcher for the typed columnar-slot probe above (4- or 8-byte
// compact keys); unsupported widths report a collision (-1).
RUNTIME_EXPORT ALWAYS_INLINE int32_t
get_matching_group_value_columnar_slot(int64_t* groups_buffer,
                                       const uint32_t entry_count,
                                       const uint32_t h,
                                       const int64_t* key,
                                       const uint32_t key_count,
                                       const uint32_t key_width) {
  if (key_width == 4) {
    return get_matching_group_value_columnar_slot(
        groups_buffer, entry_count, h, reinterpret_cast<const int32_t*>(key), key_count);
  }
  if (key_width == 8) {
    return get_matching_group_value_columnar_slot(
        groups_buffer, entry_count, h, key, key_count);
  }
  return -1;
}
__device__ int32_t get_matching_group_value_columnar_slot(int64_t *groups_buffer, const uint32_t entry_count, const uint32_t h, const T *key, const uint32_t key_count)

+ Here is the call graph for this function:

// Perfect-hash row-wise probe: collisions are impossible, so an empty row is
// simply claimed by writing the key; the agg slots start right after the key
// columns.
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value_perfect_hash(
    int64_t* groups_buffer,
    const uint32_t hashed_index,
    const int64_t* key,
    const uint32_t key_count,
    const uint32_t row_size_quad) {
  const uint32_t row_off = hashed_index * row_size_quad;
  if (groups_buffer[row_off] == EMPTY_KEY_64) {
    for (uint32_t i = 0; i < key_count; ++i) {
      groups_buffer[row_off + i] = key[i];
    }
  }
  return groups_buffer + row_off + key_count;
}
#define EMPTY_KEY_64
// For a particular hashed index (only used with multi-column perfect hash
// group by) returns the row-wise offset of the group in the output buffer.
// Intended for keyless hash use: it assumes there are no group-key columns
// prepended to the output buffer, so rows contain agg slots only.
RUNTIME_EXPORT ALWAYS_INLINE int64_t* get_matching_group_value_perfect_hash_keyless(
    int64_t* groups_buffer,
    const uint32_t hashed_index,
    const uint32_t row_size_quad) {
  return groups_buffer + row_size_quad * hashed_index;
}
// Placeholder so the CPU build links; thread indices exist only on the GPU.
GPU_RT_STUB int64_t get_thread_index() {
  return 0;
}
// End (exclusive) of the non-null section of a sorted buffer: when the null
// section is the tail, valid values stop where nulls start; otherwise they
// run to the end of the buffer.
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_end_pos(const int64_t num_elems,
                                                           const int64_t null_start_pos,
                                                           const int64_t null_end_pos) {
  return null_end_pos == num_elems ? null_start_pos : num_elems;
}

+ Here is the caller graph for this function:

// Start of the non-null section of a sorted buffer: when the null section is
// the head, valid values begin just past it; otherwise they start at 0.
RUNTIME_EXPORT ALWAYS_INLINE int64_t get_valid_buf_start_pos(const int64_t null_start_pos,
                                                             const int64_t null_end_pos) {
  return null_start_pos == 0 ? null_end_pos + 1 : 0;
}

+ Here is the caller graph for this function:

// Reads the column value at sorted-frame position `target_row_idx_in_frame`,
// resolved through the partition's sorted index. Positions outside
// [frame_start_offset, frame_end_offset] and column-null values both map to
// the logical null sentinel.
template <typename COL_TYPE, typename LOGICAL_TYPE>
inline LOGICAL_TYPE get_value_in_window_frame(const int64_t target_row_idx_in_frame,
                                              const int64_t frame_start_offset,
                                              const int64_t frame_end_offset,
                                              const COL_TYPE* col_buf,
                                              const int32_t* partition_rowid_buf,
                                              const int64_t* ordered_index_buf,
                                              const LOGICAL_TYPE logical_null_val,
                                              const LOGICAL_TYPE col_null_val) {
  const bool in_frame = frame_start_offset <= target_row_idx_in_frame &&
                        target_row_idx_in_frame <= frame_end_offset;
  if (!in_frame) {
    return logical_null_val;
  }
  const auto physical_row =
      partition_rowid_buf[ordered_index_buf[target_row_idx_in_frame]];
  const LOGICAL_TYPE value = col_buf[physical_row];
  return value == col_null_val ? logical_null_val : value;
}
// Offset of the first node at `level` in a flattened segment tree: the sum
// of node counts of all shallower levels, i.e. 1 + f + f^2 + ... + f^(level-1).
// Fix: computed with pure integer arithmetic. The original accumulated
// pow(tree_fanout, i) -- a floating-point transcendental whose result can
// round just below the exact integer and truncate incorrectly when added to
// a size_t (besides being far slower than an integer multiply).
RUNTIME_EXPORT ALWAYS_INLINE size_t getStartOffsetForSegmentTreeTraversal(
    size_t level,
    size_t tree_fanout) {
  size_t offset = 0;
  size_t level_node_count = 1;  // tree_fanout^i, starting at fanout^0 == 1
  for (size_t i = 0; i < level; i++) {
    offset += level_node_count;
    level_node_count *= tree_fanout;
  }
  return offset;
}

+ Here is the caller graph for this function:

// On the CPU path the group-buffer index coincides with the thread's start
// position.
RUNTIME_EXPORT NEVER_INLINE int32_t group_buff_idx_impl() {
  return pos_start_impl(nullptr);
}
__device__ int32_t pos_start_impl(const int32_t *row_index_resume)
Definition: cuda_mapd_rt.cu:27

+ Here is the call graph for this function:

// CPU stand-in for the GPU columnar buffer initializer; performs no work.
RUNTIME_EXPORT NEVER_INLINE void init_columnar_group_by_buffer_gpu(
    int64_t* groups_buffer,
    const int64_t* init_vals,
    const uint32_t groups_buffer_entry_count,
    const uint32_t key_qw_count,
    const uint32_t agg_col_count,
    const bool keyless,
    const bool blocks_share_memory,
    const int32_t frag_idx) {
#ifndef _WIN32
  // the body is not really needed, just make sure the call is not optimized away
  assert(groups_buffer);
#endif
}
// CPU stand-in for the GPU row-wise buffer initializer; performs no work.
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_gpu(
    int64_t* groups_buffer,
    const int64_t* init_vals,
    const uint32_t groups_buffer_entry_count,
    const uint32_t key_qw_count,
    const uint32_t agg_col_count,
    const bool keyless,
    const int8_t warp_size) {
#ifndef _WIN32
  // the body is not really needed, just make sure the call is not optimized away
  assert(groups_buffer);
#endif
}
// CPU stand-in kept so the initializer symbol exists; performs no work.
RUNTIME_EXPORT NEVER_INLINE void init_group_by_buffer_impl(
    int64_t* groups_buffer,
    const int64_t* init_vals,
    const uint32_t groups_buffer_entry_count,
    const uint32_t key_qw_count,
    const uint32_t agg_col_count,
    const bool keyless,
    const int8_t warp_size) {
#ifndef _WIN32
  // the body is not really needed, just make sure the call is not optimized away
  assert(groups_buffer);
#endif
}
// Shared memory only exists on the GPU; the CPU build returns nullptr.
RUNTIME_EXPORT int64_t* init_shared_mem(const int64_t* global_groups_buffer,
                                        const int32_t groups_buffer_size) {
  return nullptr;
}
RUNTIME_EXPORT NEVER_INLINE const int64_t* init_shared_mem_nop ( const int64_t *  groups_buffer,
const int32_t  groups_buffer_size 
)

Definition at line 1866 of file RuntimeFunctions.cpp.

1868  {
1869  return groups_buffer;
1870 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t key_for_string_encoded ( const int32_t  str_id)

Definition at line 2149 of file RuntimeFunctions.cpp.

2149  {
2150  return str_id;
2151 }
RUNTIME_EXPORT NEVER_INLINE void linear_probabilistic_count ( uint8_t *  bitmap,
const uint32_t  bitmap_bytes,
const uint8_t *  key_bytes,
const uint32_t  key_len 
)

Definition at line 2373 of file RuntimeFunctions.cpp.

References MurmurHash3().

2377  {
2378  const uint32_t bit_pos = MurmurHash3(key_bytes, key_len, 0) % (bitmap_bytes * 8);
2379  const uint32_t word_idx = bit_pos / 32;
2380  const uint32_t bit_idx = bit_pos % 32;
2381  reinterpret_cast<uint32_t*>(bitmap)[word_idx] |= 1 << bit_idx;
2382 }
RUNTIME_EXPORT NEVER_INLINE DEVICE uint32_t MurmurHash3(const void *key, int len, const uint32_t seed)
Definition: MurmurHash.cpp:33

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE double load_avg_decimal ( const int64_t *  sum,
const int64_t *  count,
const double  null_val,
const uint32_t  scale 
)

Definition at line 2352 of file RuntimeFunctions.cpp.

2355  {
2356  return *count != 0 ? (static_cast<double>(*sum) / pow(10, scale)) / *count : null_val;
2357 }
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_double ( const int64_t *  agg,
const int64_t *  count,
const double  null_val 
)

Definition at line 2359 of file RuntimeFunctions.cpp.

2361  {
2362  return *count != 0 ? *reinterpret_cast<const double*>(may_alias_ptr(agg)) / *count
2363  : null_val;
2364 }
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_float ( const int32_t *  agg,
const int32_t *  count,
const double  null_val 
)

Definition at line 2366 of file RuntimeFunctions.cpp.

2368  {
2369  return *count != 0 ? *reinterpret_cast<const float*>(may_alias_ptr(agg)) / *count
2370  : null_val;
2371 }
RUNTIME_EXPORT ALWAYS_INLINE double load_avg_int ( const int64_t *  sum,
const int64_t *  count,
const double  null_val 
)

Definition at line 2346 of file RuntimeFunctions.cpp.

2348  {
2349  return *count != 0 ? static_cast<double>(*sum) / *count : null_val;
2350 }
RUNTIME_EXPORT ALWAYS_INLINE double load_double ( const int64_t *  agg)

Definition at line 2338 of file RuntimeFunctions.cpp.

2338  {
2339  return *reinterpret_cast<const double*>(may_alias_ptr(agg));
2340 }
RUNTIME_EXPORT ALWAYS_INLINE float load_float ( const int32_t *  agg)

Definition at line 2342 of file RuntimeFunctions.cpp.

2342  {
2343  return *reinterpret_cast<const float*>(may_alias_ptr(agg));
2344 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_and ( const int8_t  lhs,
const int8_t  rhs,
const int8_t  null_val 
)

Definition at line 336 of file RuntimeFunctions.cpp.

Referenced by process_detections().

338  {
339  if (lhs == null_val) {
340  return rhs == 0 ? rhs : null_val;
341  }
342  if (rhs == null_val) {
343  return lhs == 0 ? lhs : null_val;
344  }
345  return (lhs && rhs) ? 1 : 0;
346 }

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_not ( const int8_t  operand,
const int8_t  null_val 
)

Definition at line 331 of file RuntimeFunctions.cpp.

332  {
333  return operand == null_val ? operand : (operand ? 0 : 1);
334 }
RUNTIME_EXPORT ALWAYS_INLINE int8_t logical_or ( const int8_t  lhs,
const int8_t  rhs,
const int8_t  null_val 
)

Definition at line 348 of file RuntimeFunctions.cpp.

350  {
351  if (lhs == null_val) {
352  return rhs == 0 ? null_val : rhs;
353  }
354  if (rhs == null_val) {
355  return lhs == 0 ? null_val : lhs;
356  }
357  return (lhs || rhs) ? 1 : 0;
358 }
ALWAYS_INLINE DEVICE int32_t map_string_dict_id ( const int32_t  string_id,
const int64_t  translation_map_handle,
const int32_t  min_source_id 
)

Definition at line 2154 of file RuntimeFunctions.cpp.

2156  {
2157  const int32_t* translation_map =
2158  reinterpret_cast<const int32_t*>(translation_map_handle);
2159  return translation_map[string_id - min_source_id];
2160 }
RUNTIME_EXPORT void multifrag_query ( int32_t *  error_codes,
int32_t *  total_matched,
int64_t **  out,
const uint32_t *  num_fragments_ptr,
const uint32_t *  num_tables_ptr,
const uint32_t *  row_index_resume,
const int8_t ***  col_buffers,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
const int64_t *  init_agg_value,
const int64_t *  join_hash_tables,
const int8_t *  row_func_mgr 
)

Definition at line 2465 of file RuntimeFunctions.cpp.

References get_error_code(), and query_stub().

2477  {
2478  uint32_t const num_fragments = *num_fragments_ptr;
2479  uint32_t const num_tables = *num_tables_ptr;
2480  // num_fragments_ptr and num_tables_ptr are replaced by frag_idx when passed below.
2481  for (uint32_t frag_idx = 0;
2482  frag_idx < num_fragments && get_error_code(error_codes) == 0;
2483  ++frag_idx) {
2484  query_stub(error_codes,
2485  total_matched,
2486  out,
2487  frag_idx,
2488  row_index_resume,
2489  col_buffers ? col_buffers[frag_idx] : nullptr,
2490  &num_rows[frag_idx * num_tables],
2491  &frag_row_offsets[frag_idx * num_tables],
2492  max_matched,
2493  init_agg_value,
2494  join_hash_tables,
2495  row_func_mgr);
2496  }
2497 }
RUNTIME_EXPORT NEVER_INLINE void query_stub(int32_t *error_codes, int32_t *total_matched, int64_t **out, const uint32_t frag_idx, const uint32_t *row_index_resume, const int8_t **col_buffers, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, const int64_t *init_agg_value, const int64_t *join_hash_tables, const int8_t *row_func_mgr)
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_error_code(int32_t *error_codes)

+ Here is the call graph for this function:

RUNTIME_EXPORT void multifrag_query_hoisted_literals ( int32_t *  error_codes,
int32_t *  total_matched,
int64_t **  out,
const uint32_t *  num_fragments_ptr,
const uint32_t *  num_tables_ptr,
const uint32_t *  row_index_resume,
const int8_t ***  col_buffers,
const int8_t *  literals,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
const int64_t *  init_agg_value,
const int64_t *  join_hash_tables,
const int8_t *  row_func_mgr 
)

Definition at line 2407 of file RuntimeFunctions.cpp.

References get_error_code(), and query_stub_hoisted_literals().

2421  {
2422  uint32_t const num_fragments = *num_fragments_ptr;
2423  uint32_t const num_tables = *num_tables_ptr;
2424  // num_fragments_ptr and num_tables_ptr are replaced by frag_idx when passed below.
2425  for (uint32_t frag_idx = 0;
2426  frag_idx < num_fragments && get_error_code(error_codes) == 0;
2427  ++frag_idx) {
2428  query_stub_hoisted_literals(error_codes,
2429  total_matched,
2430  out,
2431  frag_idx,
2432  row_index_resume,
2433  col_buffers ? col_buffers[frag_idx] : nullptr,
2434  literals,
2435  &num_rows[frag_idx * num_tables],
2436  &frag_row_offsets[frag_idx * num_tables],
2437  max_matched,
2438  init_agg_value,
2439  join_hash_tables,
2440  row_func_mgr);
2441  }
2442 }
RUNTIME_EXPORT NEVER_INLINE void query_stub_hoisted_literals(int32_t *error_codes, int32_t *total_matched, int64_t **out, const uint32_t frag_idx, const uint32_t *row_index_resume, const int8_t **col_buffers, const int8_t *literals, const int64_t *num_rows, const uint64_t *frag_row_offsets, const int32_t *max_matched, const int64_t *init_agg_value, const int64_t *join_hash_tables, const int8_t *row_func_mgr)
RUNTIME_EXPORT ALWAYS_INLINE int32_t get_error_code(int32_t *error_codes)

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE double percent_window_func ( const int64_t  output_buff,
const int64_t  pos 
)

Definition at line 2332 of file RuntimeFunctions.cpp.

2334  {
2335  return reinterpret_cast<const double*>(output_buff)[pos];
2336 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool point_double_is_null ( double *  point)

Definition at line 2511 of file RuntimeFunctions.cpp.

2511  {
2512  constexpr double null_array_double = 2 * DBL_MIN; // Shared/InlineNullValues.h
2513  return point == nullptr || *point == null_array_double;
2514 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool point_int32_is_null ( int32_t *  point)

Definition at line 2506 of file RuntimeFunctions.cpp.

2506  {
2507  constexpr uint32_t null_array_compressed_32 = 0x80000000U; // Shared/InlineNullValues.h
2508  return point == nullptr || uint32_t(*point) == null_array_compressed_32;
2509 }
RUNTIME_EXPORT NEVER_INLINE int32_t pos_start_impl ( int32_t const *  row_index_resume)

Definition at line 1816 of file RuntimeFunctions.cpp.

1816  {
1817  return row_index_resume ? *row_index_resume : 0;
1818 }
RUNTIME_EXPORT NEVER_INLINE int32_t pos_step_impl ( )

Definition at line 1824 of file RuntimeFunctions.cpp.

1824  {
1825  return 1;
1826 }
RUNTIME_EXPORT NEVER_INLINE void query_stub ( int32_t *  error_codes,
int32_t *  total_matched,
int64_t **  out,
const uint32_t  frag_idx,
const uint32_t *  row_index_resume,
const int8_t **  col_buffers,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
const int64_t *  init_agg_value,
const int64_t *  join_hash_tables,
const int8_t *  row_func_mgr 
)

Definition at line 2445 of file RuntimeFunctions.cpp.

Referenced by multifrag_query().

2456  {
2457 #ifndef _WIN32
2458  assert(error_codes || total_matched || out || frag_idx || row_index_resume ||
2459  col_buffers || num_rows || frag_row_offsets || max_matched || init_agg_value ||
2460  join_hash_tables || row_func_mgr);
2461 #endif
2462 }

+ Here is the caller graph for this function:

RUNTIME_EXPORT NEVER_INLINE void query_stub_hoisted_literals ( int32_t *  error_codes,
int32_t *  total_matched,
int64_t **  out,
const uint32_t  frag_idx,
const uint32_t *  row_index_resume,
const int8_t **  col_buffers,
const int8_t *  literals,
const int64_t *  num_rows,
const uint64_t *  frag_row_offsets,
const int32_t *  max_matched,
const int64_t *  init_agg_value,
const int64_t *  join_hash_tables,
const int8_t *  row_func_mgr 
)

Definition at line 2385 of file RuntimeFunctions.cpp.

Referenced by multifrag_query_hoisted_literals().

2398  {
2399 #ifndef _WIN32
2400  assert(error_codes || total_matched || out || frag_idx || row_index_resume ||
2401  col_buffers || literals || num_rows || frag_row_offsets || max_matched ||
2402  init_agg_value || join_hash_tables || row_func_mgr);
2403 #endif
2404 }

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE void record_error_code ( const int32_t  err_code,
int32_t *  error_codes 
)

Definition at line 1846 of file RuntimeFunctions.cpp.

References pos_start_impl().

1847  {
1848  // NB: never override persistent error codes (with code greater than zero).
1849  // On GPU, a projection query with a limit can run out of slots without it
1850  // being an actual error if the limit has been hit. If a persistent error
1851  // (division by zero, for example) occurs before running out of slots, we
1852  // have to avoid overriding it, because there's a risk that the query would
1853  // go through if we override with a potentially benign out-of-slots code.
1854  if (err_code && error_codes[pos_start_impl(nullptr)] <= 0) {
1855  error_codes[pos_start_impl(nullptr)] = err_code;
1856  }
1857 }
__device__ int32_t pos_start_impl(const int32_t *row_index_resume)
Definition: cuda_mapd_rt.cu:27

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE int64_t row_number_window_func ( const int64_t  output_buff,
const int64_t  pos 
)

Definition at line 2328 of file RuntimeFunctions.cpp.

2328  {
2329  return reinterpret_cast<const int64_t*>(output_buff)[pos];
2330 }
RUNTIME_EXPORT ALWAYS_INLINE DEVICE bool sample_ratio ( const double  proportion,
const int64_t  row_offset 
)

Definition at line 2197 of file RuntimeFunctions.cpp.

Referenced by ScalarExprVisitor< std::set< shared::TableKey > >::visit().

2199  {
2200  const int64_t threshold = 4294967296 * proportion;
2201  return (row_offset * 2654435761) % 4294967296 < threshold;
2202 }

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_down_not_nullable ( const int64_t  operand,
const int64_t  scale,
const int64_t  null_val 
)

Definition at line 223 of file RuntimeFunctions.cpp.

225  {
226  int64_t tmp = scale >> 1;
227  tmp = operand >= 0 ? operand + tmp : operand - tmp;
228  return tmp / scale;
229 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_down_nullable ( const int64_t  operand,
const int64_t  scale,
const int64_t  null_val 
)

Definition at line 209 of file RuntimeFunctions.cpp.

211  {
212  // rounded scale down of a decimal
213  if (operand == null_val) {
214  return null_val;
215  }
216 
217  int64_t tmp = scale >> 1;
218  tmp = operand >= 0 ? operand + tmp : operand - tmp;
219  return tmp / scale;
220 }
RUNTIME_EXPORT ALWAYS_INLINE int64_t scale_decimal_up ( const int64_t  operand,
const uint64_t  scale,
const int64_t  operand_null_val,
const int64_t  result_null_val 
)

Definition at line 201 of file RuntimeFunctions.cpp.

204  {
205  return operand != operand_null_val ? operand * scale : result_null_val;
206 }
RUNTIME_EXPORT ALWAYS_INLINE void set_matching_group_value_perfect_hash_columnar ( int64_t *  groups_buffer,
const uint32_t  hashed_index,
const int64_t *  key,
const uint32_t  key_count,
const uint32_t  entry_count 
)

Definition at line 2086 of file RuntimeFunctions.cpp.

References EMPTY_KEY_64.

2090  {
2091  if (groups_buffer[hashed_index] == EMPTY_KEY_64) {
2092  for (uint32_t i = 0; i < key_count; i++) {
2093  groups_buffer[i * entry_count + hashed_index] = key[i];
2094  }
2095  }
2096 }
#define EMPTY_KEY_64
RUNTIME_EXPORT ALWAYS_INLINE StringView string_pack ( const int8_t *  ptr,
const int32_t  len 
)

Definition at line 2122 of file RuntimeFunctions.cpp.

2123  {
2124  return {reinterpret_cast<char const*>(ptr), static_cast<uint64_t>(len)};
2125 }
GPU_RT_STUB void sync_threadblock ( )

Definition at line 1808 of file RuntimeFunctions.cpp.

1808 {}
GPU_RT_STUB void sync_warp ( )

Definition at line 1806 of file RuntimeFunctions.cpp.

1806 {}
GPU_RT_STUB void sync_warp_protected ( int64_t  thread_pos,
int64_t  row_count 
)

Definition at line 1807 of file RuntimeFunctions.cpp.

1807 {}
GPU_RT_STUB int8_t thread_warp_idx ( const int8_t  warp_sz)

Definition at line 1828 of file RuntimeFunctions.cpp.

1828  {
1829  return 0;
1830 }
ALWAYS_INLINE DEVICE double tree_model_reg_predict ( const double *  regressor_inputs,
const int64_t  decision_tree_table_handle,
const int64_t  decision_tree_offsets_handle,
const int32_t  num_regressors,
const int32_t  num_trees,
const bool  compute_avg,
const double  null_value 
)

Definition at line 2162 of file RuntimeFunctions.cpp.

References DecisionTreeEntry::feature_index, DecisionTreeEntry::isSplitNode(), DecisionTreeEntry::left_child_row_idx, DecisionTreeEntry::right_child_row_idx, and DecisionTreeEntry::value.

2169  {
2170  for (int32_t regressor_idx = 0; regressor_idx < num_regressors; ++regressor_idx) {
2171  if (regressor_inputs[regressor_idx] == null_value) {
2172  return null_value;
2173  }
2174  }
2175  const DecisionTreeEntry* decision_tree_table =
2176  reinterpret_cast<const DecisionTreeEntry*>(decision_tree_table_handle);
2177  const int64_t* decision_tree_offsets =
2178  reinterpret_cast<const int64_t*>(decision_tree_offsets_handle);
2179  double sum_tree_results{0};
2180  for (int32_t tree_idx = 0; tree_idx < num_trees; ++tree_idx) {
2181  int64_t row_idx = decision_tree_offsets[tree_idx];
2182  while (true) {
2183  const DecisionTreeEntry& current_entry = decision_tree_table[row_idx];
2184  if (!current_entry.isSplitNode()) {
2185  sum_tree_results += current_entry.value;
2186  break;
2187  }
2188  const auto regressor_input = regressor_inputs[current_entry.feature_index];
2189  row_idx = regressor_input <= current_entry.value
2190  ? current_entry.left_child_row_idx
2191  : current_entry.right_child_row_idx;
2192  }
2193  }
2194  return compute_avg ? sum_tree_results / num_trees : sum_tree_results;
2195 }
double value
int64_t left_child_row_idx
int64_t feature_index
bool isSplitNode() const
int64_t right_child_row_idx

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count 
)

Definition at line 2205 of file RuntimeFunctions.cpp.

Referenced by ScalarExprVisitor< std::set< shared::TableKey > >::visit(), width_bucket_expr(), and width_bucket_nullable().

2209  {
2210  if (target_value < lower_bound) {
2211  return 0;
2212  } else if (target_value >= upper_bound) {
2213  return partition_count + 1;
2214  }
2215  return ((target_value - lower_bound) * scale_factor) + 1;
2216 }
DEVICE auto upper_bound(ARGS &&...args)
Definition: gpu_enabled.h:123
DEVICE auto lower_bound(ARGS &&...args)
Definition: gpu_enabled.h:78

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr ( const double  target_value,
const bool  reversed,
const double  lower_bound,
const double  upper_bound,
const int32_t  partition_count 
)

Definition at line 2280 of file RuntimeFunctions.cpp.

References width_bucket(), and width_bucket_reversed().

Referenced by CodeGenerator::codegen(), getExpressionRange(), and width_bucket_expr_nullable().

2284  {
2285  if (reversed) {
2286  return width_bucket_reversed(target_value,
2287  lower_bound,
2288  upper_bound,
2289  partition_count / (lower_bound - upper_bound),
2290  partition_count);
2291  }
2292  return width_bucket(target_value,
2293  lower_bound,
2294  upper_bound,
2295  partition_count / (upper_bound - lower_bound),
2296  partition_count);
2297 }
DEVICE auto upper_bound(ARGS &&...args)
Definition: gpu_enabled.h:123
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket(const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count)
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed(const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count)
DEVICE auto lower_bound(ARGS &&...args)
Definition: gpu_enabled.h:78

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr_no_oob_check ( const double  target_value,
const bool  reversed,
const double  lower_bound,
const double  upper_bound,
const int32_t  partition_count 
)

Definition at line 2314 of file RuntimeFunctions.cpp.

References width_bucket_no_oob_check(), and width_bucket_reversed_no_oob_check().

2318  {
2319  if (reversed) {
2320  return width_bucket_reversed_no_oob_check(
2321  target_value, lower_bound, partition_count / (lower_bound - upper_bound));
2322  }
2323  return width_bucket_no_oob_check(
2324  target_value, lower_bound, partition_count / (upper_bound - lower_bound));
2325 }
DEVICE auto upper_bound(ARGS &&...args)
Definition: gpu_enabled.h:123
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_no_oob_check(const double target_value, const double lower_bound, const double scale_factor)
DEVICE auto lower_bound(ARGS &&...args)
Definition: gpu_enabled.h:78
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed_no_oob_check(const double target_value, const double lower_bound, const double scale_factor)

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr_nullable ( const double  target_value,
const bool  reversed,
const double  lower_bound,
const double  upper_bound,
const int32_t  partition_count,
const double  null_val 
)

Definition at line 2300 of file RuntimeFunctions.cpp.

References width_bucket_expr().

2305  {
2306  if (target_value == null_val) {
2307  return INT32_MIN;
2308  }
2309  return width_bucket_expr(
2310  target_value, reversed, lower_bound, upper_bound, partition_count);
2311 }
DEVICE auto upper_bound(ARGS &&...args)
Definition: gpu_enabled.h:123
DEVICE auto lower_bound(ARGS &&...args)
Definition: gpu_enabled.h:78
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_expr(const double target_value, const bool reversed, const double lower_bound, const double upper_bound, const int32_t partition_count)

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_no_oob_check ( const double  target_value,
const double  lower_bound,
const double  scale_factor 
)

Definition at line 2264 of file RuntimeFunctions.cpp.

References gpu_enabled::lower_bound().

Referenced by width_bucket_expr_no_oob_check().

2266  {
2267  int32_t calc = (target_value - lower_bound) * scale_factor;
2268  return calc + 1;
2269 }
DEVICE auto lower_bound(ARGS &&...args)
Definition: gpu_enabled.h:78

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE int32_t width_bucket_nullable ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count,
const double  null_val 
)

Definition at line 2233 of file RuntimeFunctions.cpp.

References width_bucket().

2238  {
2239  if (target_value == null_val) {
2240  return INT32_MIN;
2241  }
2242  return width_bucket(
2243  target_value, lower_bound, upper_bound, scale_factor, partition_count);
2244 }
DEVICE auto upper_bound(ARGS &&...args)
Definition: gpu_enabled.h:123
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket(const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count)
DEVICE auto lower_bound(ARGS &&...args)
Definition: gpu_enabled.h:78

+ Here is the call graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count 
)

Definition at line 2219 of file RuntimeFunctions.cpp.

Referenced by width_bucket_expr(), and width_bucket_reversed_nullable().

2223  {
2224  if (target_value > lower_bound) {
2225  return 0;
2226  } else if (target_value <= upper_bound) {
2227  return partition_count + 1;
2228  }
2229  return ((lower_bound - target_value) * scale_factor) + 1;
2230 }
DEVICE auto upper_bound(ARGS &&...args)
Definition: gpu_enabled.h:123
DEVICE auto lower_bound(ARGS &&...args)
Definition: gpu_enabled.h:78

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed_no_oob_check ( const double  target_value,
const double  lower_bound,
const double  scale_factor 
)

Definition at line 2272 of file RuntimeFunctions.cpp.

Referenced by width_bucket_expr_no_oob_check().

2274  {
2275  int32_t calc = (lower_bound - target_value) * scale_factor;
2276  return calc + 1;
2277 }
DEVICE auto lower_bound(ARGS &&...args)
Definition: gpu_enabled.h:78

+ Here is the caller graph for this function:

RUNTIME_EXPORT ALWAYS_INLINE int32_t width_bucket_reversed_nullable ( const double  target_value,
const double  lower_bound,
const double  upper_bound,
const double  scale_factor,
const int32_t  partition_count,
const double  null_val 
)

Definition at line 2247 of file RuntimeFunctions.cpp.

References width_bucket_reversed().

2252  {
2253  if (target_value == null_val) {
2254  return INT32_MIN;
2255  }
2256  return width_bucket_reversed(
2257  target_value, lower_bound, upper_bound, scale_factor, partition_count);
2258 }
DEVICE auto upper_bound(ARGS &&...args)
Definition: gpu_enabled.h:123
RUNTIME_EXPORT ALWAYS_INLINE DEVICE int32_t width_bucket_reversed(const double target_value, const double lower_bound, const double upper_bound, const double scale_factor, const int32_t partition_count)
DEVICE auto lower_bound(ARGS &&...args)
Definition: gpu_enabled.h:78

+ Here is the call graph for this function:

GPU_RT_STUB void write_back_non_grouped_agg ( int64_t *  input_buffer,
int64_t *  output_buffer,
const int32_t  num_agg_cols 
)

Definition at line 1810 of file RuntimeFunctions.cpp.

1812  {};
RUNTIME_EXPORT NEVER_INLINE void write_back_nop ( int64_t *  dest,
int64_t *  src,
const int32_t  sz 
)

Definition at line 1872 of file RuntimeFunctions.cpp.

1874  {
1875 #ifndef _WIN32
1876  // the body is not really needed, just make sure the call is not optimized away
1877  assert(dest);
1878 #endif
1879 }