// mkql_block_logical.cpp
  1. #include "mkql_block_logical.h"
  2. #include <yql/essentials/minikql/arrow/arrow_defs.h>
  3. #include <yql/essentials/minikql/arrow/mkql_bit_utils.h>
  4. #include <yql/essentials/minikql/arrow/arrow_util.h>
  5. #include <yql/essentials/minikql/mkql_type_builder.h>
  6. #include <yql/essentials/minikql/computation/mkql_block_impl.h>
  7. #include <yql/essentials/minikql/computation/mkql_computation_node_holders.h>
  8. #include <yql/essentials/minikql/mkql_node_builder.h>
  9. #include <yql/essentials/minikql/mkql_node_cast.h>
  10. #include <arrow/util/bitmap.h>
  11. #include <arrow/util/bitmap_ops.h>
  12. #include <arrow/util/bit_util.h>
  13. #include <arrow/array/array_primitive.h>
  14. #include <arrow/array/util.h>
  15. namespace NKikimr {
  16. namespace NMiniKQL {
  17. namespace {
  18. using arrow::internal::Bitmap;
  19. std::shared_ptr<arrow::Buffer> CopyBitmap(arrow::MemoryPool* pool, const std::shared_ptr<arrow::Buffer>& bitmap, int64_t offset, int64_t len) {
  20. std::shared_ptr<arrow::Buffer> result = bitmap;
  21. if (bitmap && offset != 0) {
  22. result = ARROW_RESULT(arrow::AllocateBitmap(len, pool));
  23. arrow::internal::CopyBitmap(bitmap->data(), offset, len, result->mutable_data(), 0);
  24. }
  25. return result;
  26. }
  27. std::shared_ptr<arrow::Buffer> CopySparseBitmap(arrow::MemoryPool* pool, const std::shared_ptr<arrow::Buffer>& bitmap, int64_t offset, int64_t len) {
  28. std::shared_ptr<arrow::Buffer> result = bitmap;
  29. if (bitmap && offset != 0) {
  30. result = ARROW_RESULT(arrow::AllocateBuffer(len, pool));
  31. std::memcpy(result->mutable_data(), bitmap->data() + offset, len);
  32. }
  33. return result;
  34. }
  35. arrow::Datum MakeNullArray(arrow::MemoryPool* pool, int64_t len) {
  36. std::shared_ptr<arrow::Array> arr = ARROW_RESULT(arrow::MakeArrayOfNull(arrow::uint8(), len, pool));
  37. return arr;
  38. }
  39. bool IsAllEqualsTo(const arrow::Datum& datum, bool value) {
  40. if (datum.null_count() != 0) {
  41. return false;
  42. }
  43. if (datum.is_scalar()) {
  44. return (datum.scalar_as<arrow::UInt8Scalar>().value & 1u) == value;
  45. }
  46. size_t len = datum.array()->length;
  47. size_t popCnt = GetSparseBitmapPopCount(datum.array()->GetValues<ui8>(1), len);
  48. return popCnt == (value ? len : 0);
  49. }
  50. class TAndBlockExec {
  51. public:
  52. arrow::Status Exec(arrow::compute::KernelContext* ctx, const arrow::compute::ExecBatch& batch, arrow::Datum* res) const {
  53. auto firstDatum = batch.values[0];
  54. auto secondDatum = batch.values[1];
  55. MKQL_ENSURE(!firstDatum.is_scalar() || !secondDatum.is_scalar(), "Expected at least one array");
  56. if (IsAllEqualsTo(firstDatum, false)) {
  57. // false AND ... = false
  58. if (firstDatum.is_array()) {
  59. *res = firstDatum;
  60. } else {
  61. // need length
  62. *res = MakeFalseArray(ctx->memory_pool(), secondDatum.length());
  63. }
  64. return arrow::Status::OK();
  65. }
  66. if (IsAllEqualsTo(secondDatum, false)) {
  67. // ... AND false = false
  68. if (secondDatum.is_array()) {
  69. *res = secondDatum;
  70. } else {
  71. *res = MakeFalseArray(ctx->memory_pool(), firstDatum.length());
  72. }
  73. return arrow::Status::OK();
  74. }
  75. if (firstDatum.is_scalar()) {
  76. ui8 value = firstDatum.scalar_as<arrow::UInt8Scalar>().value & 1u;
  77. bool valid = firstDatum.scalar()->is_valid;
  78. *res = CalcScalarArray(ctx->memory_pool(), value, valid, secondDatum.array());
  79. } else if (secondDatum.is_scalar()) {
  80. ui8 value = secondDatum.scalar_as<arrow::UInt8Scalar>().value & 1u;
  81. bool valid = secondDatum.scalar()->is_valid;
  82. *res = CalcScalarArray(ctx->memory_pool(), value, valid, firstDatum.array());
  83. } else {
  84. *res = CalcArrayArray(ctx->memory_pool(), firstDatum.array(), secondDatum.array());
  85. }
  86. return arrow::Status::OK();
  87. }
  88. private:
  89. arrow::Datum CalcScalarArray(arrow::MemoryPool* pool, ui8 value, bool valid, const std::shared_ptr<arrow::ArrayData>& arr) const {
  90. bool first_true = valid && value;
  91. bool first_false = valid && !value;
  92. if (first_false) {
  93. return MakeFalseArray(pool, arr->length);
  94. }
  95. if (first_true) {
  96. return arr;
  97. }
  98. // scalar is null -> result is valid _only_ if arr[i] == false
  99. //bitmap = bitmap and not data[i]
  100. std::shared_ptr<arrow::Buffer> bitmap = ARROW_RESULT(arrow::AllocateBitmap(arr->length, pool));
  101. CompressSparseBitmapNegate(bitmap->mutable_data(), arr->GetValues<ui8>(1), arr->length);
  102. if (arr->buffers[0]) {
  103. bitmap = ARROW_RESULT(arrow::internal::BitmapAnd(pool, arr->GetValues<ui8>(0, 0), arr->offset, bitmap->data(), 0, arr->length, 0));
  104. }
  105. std::shared_ptr<arrow::Buffer> data = CopySparseBitmap(pool, arr->buffers[1], arr->offset, arr->length);
  106. return arrow::ArrayData::Make(arr->type, arr->length, { bitmap, data });
  107. }
  108. arrow::Datum CalcArrayArray(arrow::MemoryPool* pool, const std::shared_ptr<arrow::ArrayData>& arr1,
  109. const std::shared_ptr<arrow::ArrayData>& arr2) const
  110. {
  111. Y_ABORT_UNLESS(arr1->length == arr2->length);
  112. auto buf1 = arr1->buffers[0];
  113. auto buf2 = arr2->buffers[0];
  114. const int64_t offset1 = arr1->offset;
  115. const int64_t offset2 = arr2->offset;
  116. const int64_t length = arr1->length;
  117. std::shared_ptr<arrow::Buffer> bitmap;
  118. if (buf1 || buf2) {
  119. bitmap = ARROW_RESULT(arrow::AllocateBitmap(length, pool));
  120. auto first = ARROW_RESULT(arrow::AllocateBitmap(length, pool));
  121. auto second = ARROW_RESULT(arrow::AllocateBitmap(length, pool));
  122. CompressSparseBitmap(first->mutable_data(), arr1->GetValues<ui8>(1), length);
  123. CompressSparseBitmap(second->mutable_data(), arr2->GetValues<ui8>(1), length);
  124. Bitmap v1(first, 0, length);
  125. Bitmap v2(second, 0, length);
  126. Bitmap b(bitmap, 0, length);
  127. std::array<Bitmap, 1> out{b};
  128. //bitmap = first_false | second_false | (first_true & second_true);
  129. //bitmap = (b1 & ~v1) | (b2 & ~v2) | (b1 & v1 & b2 & v2)
  130. if (buf1 && buf2) {
  131. Bitmap b1(buf1, offset1, length);
  132. Bitmap b2(buf2, offset2, length);
  133. std::array<Bitmap, 4> in{b1, v1, b2, v2};
  134. Bitmap::VisitWordsAndWrite(in, &out, [](const std::array<uint64_t, 4>& in, std::array<uint64_t, 1>* out) {
  135. uint64_t b1 = in[0];
  136. uint64_t v1 = in[1];
  137. uint64_t b2 = in[2];
  138. uint64_t v2 = in[3];
  139. out->at(0) = (b1 & ~v1) | (b2 & ~v2) | (b1 & v1 & b2 & v2);
  140. });
  141. } else if (buf1) {
  142. Bitmap b1(buf1, offset1, length);
  143. std::array<Bitmap, 3> in{b1, v1, v2};
  144. Bitmap::VisitWordsAndWrite(in, &out, [](const std::array<uint64_t, 3>& in, std::array<uint64_t, 1>* out) {
  145. uint64_t b1 = in[0];
  146. uint64_t v1 = in[1];
  147. uint64_t v2 = in[2];
  148. out->at(0) = (b1 & ~v1) | (~v2) | (b1 & v1 & v2);
  149. });
  150. } else {
  151. Bitmap b2(buf2, offset2, length);
  152. std::array<Bitmap, 3> in{v1, b2, v2};
  153. Bitmap::VisitWordsAndWrite(in, &out, [](const std::array<uint64_t, 3>& in, std::array<uint64_t, 1>* out) {
  154. uint64_t v1 = in[0];
  155. uint64_t b2 = in[1];
  156. uint64_t v2 = in[2];
  157. out->at(0) = (~v1) | (b2 & ~v2) | (v1 & b2 & v2);
  158. });
  159. }
  160. }
  161. std::shared_ptr<arrow::Buffer> data = ARROW_RESULT(arrow::AllocateBuffer(length, pool));
  162. AndSparseBitmaps(data->mutable_data(), arr1->GetValues<ui8>(1), arr2->GetValues<ui8>(1), length);
  163. return arrow::ArrayData::Make(arr1->type, length, { bitmap, data });
  164. }
  165. };
  166. class TOrBlockExec {
  167. public:
  168. arrow::Status Exec(arrow::compute::KernelContext* ctx, const arrow::compute::ExecBatch& batch, arrow::Datum* res) const {
  169. auto firstDatum = batch.values[0];
  170. auto secondDatum = batch.values[1];
  171. MKQL_ENSURE(!firstDatum.is_scalar() || !secondDatum.is_scalar(), "Expected at least one array");
  172. if (IsAllEqualsTo(firstDatum, true)) {
  173. // true OR ... = true
  174. if (firstDatum.is_array()) {
  175. *res = firstDatum;
  176. } else {
  177. // need length
  178. *res = MakeTrueArray(ctx->memory_pool(), secondDatum.length());
  179. }
  180. return arrow::Status::OK();
  181. }
  182. if (IsAllEqualsTo(secondDatum, true)) {
  183. // ... OR true = true
  184. if (secondDatum.is_array()) {
  185. *res = secondDatum;
  186. } else {
  187. *res = MakeTrueArray(ctx->memory_pool(), firstDatum.length());
  188. }
  189. return arrow::Status::OK();
  190. }
  191. if (firstDatum.is_scalar()) {
  192. ui8 value = firstDatum.scalar_as<arrow::UInt8Scalar>().value;
  193. bool valid = firstDatum.scalar()->is_valid;
  194. *res = CalcScalarArray(ctx->memory_pool(), value, valid, secondDatum.array());
  195. } else if (secondDatum.is_scalar()) {
  196. ui8 value = secondDatum.scalar_as<arrow::UInt8Scalar>().value;
  197. bool valid = secondDatum.scalar()->is_valid;
  198. *res = CalcScalarArray(ctx->memory_pool(), value, valid, firstDatum.array());
  199. } else {
  200. *res = CalcArrayArray(ctx->memory_pool(), firstDatum.array(), secondDatum.array());
  201. }
  202. return arrow::Status::OK();
  203. }
  204. private:
  205. arrow::Datum CalcScalarArray(arrow::MemoryPool* pool, ui8 value, bool valid, const std::shared_ptr<arrow::ArrayData>& arr) const {
  206. bool first_true = valid && value;
  207. bool first_false = valid && !value;
  208. if (first_true) {
  209. return MakeTrueArray(pool, arr->length);
  210. }
  211. if (first_false) {
  212. return arr;
  213. }
  214. // scalar is null -> result is valid _only_ if arr[i] == true
  215. //bitmap = bitmap and data[i]
  216. std::shared_ptr<arrow::Buffer> bitmap = ARROW_RESULT(arrow::AllocateBitmap(arr->length, pool));
  217. CompressSparseBitmap(bitmap->mutable_data(), arr->GetValues<ui8>(1), arr->length);
  218. if (arr->buffers[0]) {
  219. bitmap = ARROW_RESULT(arrow::internal::BitmapAnd(pool, arr->GetValues<ui8>(0, 0), arr->offset, bitmap->data(), 0, arr->length, 0));
  220. }
  221. std::shared_ptr<arrow::Buffer> data = CopySparseBitmap(pool, arr->buffers[1], arr->offset, arr->length);
  222. return arrow::ArrayData::Make(arr->type, arr->length, { bitmap, data });
  223. }
  224. arrow::Datum CalcArrayArray(arrow::MemoryPool* pool, const std::shared_ptr<arrow::ArrayData>& arr1,
  225. const std::shared_ptr<arrow::ArrayData>& arr2) const
  226. {
  227. Y_ABORT_UNLESS(arr1->length == arr2->length);
  228. auto buf1 = arr1->buffers[0];
  229. auto buf2 = arr2->buffers[0];
  230. const int64_t offset1 = arr1->offset;
  231. const int64_t offset2 = arr2->offset;
  232. const int64_t length = arr1->length;
  233. std::shared_ptr<arrow::Buffer> bitmap;
  234. if (buf1 || buf2) {
  235. bitmap = ARROW_RESULT(arrow::AllocateBitmap(length, pool));
  236. auto first = ARROW_RESULT(arrow::AllocateBitmap(length, pool));
  237. auto second = ARROW_RESULT(arrow::AllocateBitmap(length, pool));
  238. CompressSparseBitmap(first->mutable_data(), arr1->GetValues<ui8>(1), length);
  239. CompressSparseBitmap(second->mutable_data(), arr2->GetValues<ui8>(1), length);
  240. Bitmap v1(first, 0, length);
  241. Bitmap v2(second, 0, length);
  242. Bitmap b(bitmap, 0, length);
  243. std::array<Bitmap, 1> out{b};
  244. //bitmap = first_true | second_true | (first_false & second_false);
  245. //bitmap = (b1 & v1) | (b2 & v2) | (b1 & ~v1 & b2 & ~v2)
  246. if (buf1 && buf2) {
  247. Bitmap b1(buf1, offset1, length);
  248. Bitmap b2(buf2, offset2, length);
  249. std::array<Bitmap, 4> in{b1, v1, b2, v2};
  250. Bitmap::VisitWordsAndWrite(in, &out, [](const std::array<uint64_t, 4>& in, std::array<uint64_t, 1>* out) {
  251. uint64_t b1 = in[0];
  252. uint64_t v1 = in[1];
  253. uint64_t b2 = in[2];
  254. uint64_t v2 = in[3];
  255. out->at(0) = (b1 & v1) | (b2 & v2) | (b1 & ~v1 & b2 & ~v2);
  256. });
  257. } else if (buf1) {
  258. Bitmap b1(buf1, offset1, length);
  259. std::array<Bitmap, 3> in{b1, v1, v2};
  260. Bitmap::VisitWordsAndWrite(in, &out, [](const std::array<uint64_t, 3>& in, std::array<uint64_t, 1>* out) {
  261. uint64_t b1 = in[0];
  262. uint64_t v1 = in[1];
  263. uint64_t v2 = in[2];
  264. out->at(0) = (b1 & v1) | v2 | (b1 & ~v1 & ~v2);
  265. });
  266. } else {
  267. Bitmap b2(buf2, offset2, length);
  268. std::array<Bitmap, 3> in{v1, b2, v2};
  269. Bitmap::VisitWordsAndWrite(in, &out, [](const std::array<uint64_t, 3>& in, std::array<uint64_t, 1>* out) {
  270. uint64_t v1 = in[0];
  271. uint64_t b2 = in[1];
  272. uint64_t v2 = in[2];
  273. out->at(0) = v1 | (b2 & v2) | (~v1 & b2 & ~v2);
  274. });
  275. }
  276. }
  277. std::shared_ptr<arrow::Buffer> data = ARROW_RESULT(arrow::AllocateBuffer(length, pool));
  278. OrSparseBitmaps(data->mutable_data(), arr1->GetValues<ui8>(1), arr2->GetValues<ui8>(1), length);
  279. return arrow::ArrayData::Make(arr1->type, length, { bitmap, data });
  280. }
  281. };
  282. class TXorBlockExec {
  283. public:
  284. arrow::Status Exec(arrow::compute::KernelContext* ctx, const arrow::compute::ExecBatch& batch, arrow::Datum* res) const {
  285. auto firstDatum = batch.values[0];
  286. auto secondDatum = batch.values[1];
  287. MKQL_ENSURE(!firstDatum.is_scalar() || !secondDatum.is_scalar(), "Expected at least one array");
  288. if (firstDatum.null_count() == firstDatum.length()) {
  289. if (firstDatum.is_array()) {
  290. *res = firstDatum;
  291. } else {
  292. *res = MakeNullArray(ctx->memory_pool(), secondDatum.length());
  293. }
  294. return arrow::Status::OK();
  295. }
  296. if (secondDatum.null_count() == secondDatum.length()) {
  297. if (secondDatum.is_array()) {
  298. *res = secondDatum;
  299. } else {
  300. *res = MakeNullArray(ctx->memory_pool(), firstDatum.length());
  301. }
  302. return arrow::Status::OK();
  303. }
  304. if (firstDatum.is_scalar()) {
  305. ui8 value = firstDatum.scalar_as<arrow::UInt8Scalar>().value;
  306. *res = CalcScalarArray(ctx->memory_pool(), value, secondDatum.array());
  307. } else if (secondDatum.is_scalar()) {
  308. ui8 value = secondDatum.scalar_as<arrow::UInt8Scalar>().value;
  309. *res = CalcScalarArray(ctx->memory_pool(), value, firstDatum.array());
  310. } else {
  311. *res = CalcArrayArray(ctx->memory_pool(), firstDatum.array(), secondDatum.array());
  312. }
  313. return arrow::Status::OK();
  314. }
  315. private:
  316. arrow::Datum CalcScalarArray(arrow::MemoryPool* pool, ui8 value, const std::shared_ptr<arrow::ArrayData>& arr) const {
  317. std::shared_ptr<arrow::Buffer> bitmap = CopyBitmap(pool, arr->buffers[0], arr->offset, arr->length);
  318. std::shared_ptr<arrow::Buffer> data = ARROW_RESULT(arrow::AllocateBuffer(arr->length, pool));
  319. XorSparseBitmapScalar(data->mutable_data(), value, arr->GetValues<ui8>(1), arr->length);
  320. return arrow::ArrayData::Make(arr->type, arr->length, { bitmap, data });
  321. }
  322. arrow::Datum CalcArrayArray(arrow::MemoryPool* pool, const std::shared_ptr<arrow::ArrayData>& arr1,
  323. const std::shared_ptr<arrow::ArrayData>& arr2) const
  324. {
  325. Y_ABORT_UNLESS(arr1->length == arr2->length);
  326. auto b1 = arr1->buffers[0];
  327. auto b2 = arr2->buffers[0];
  328. const int64_t offset1 = arr1->offset;
  329. const int64_t offset2 = arr2->offset;
  330. const int64_t length = arr1->length;
  331. std::shared_ptr<arrow::Buffer> bitmap;
  332. if (b1 && b2) {
  333. bitmap = ARROW_RESULT(arrow::internal::BitmapAnd(pool, b1->data(), offset1, b2->data(), offset2, length, 0));
  334. } else {
  335. bitmap = CopyBitmap(pool, b1 ? b1 : b2, b1 ? offset1 : offset2, length);
  336. }
  337. std::shared_ptr<arrow::Buffer> data = ARROW_RESULT(arrow::AllocateBuffer(length, pool));
  338. XorSparseBitmaps(data->mutable_data(), arr1->GetValues<ui8>(1), arr2->GetValues<ui8>(1), length);
  339. return arrow::ArrayData::Make(arr1->type, length, { bitmap, data });
  340. }
  341. };
  342. class TNotBlockExec {
  343. public:
  344. arrow::Status Exec(arrow::compute::KernelContext* ctx, const arrow::compute::ExecBatch& batch, arrow::Datum* res) const {
  345. const auto& input = batch.values[0];
  346. MKQL_ENSURE(input.is_array(), "Expected array");
  347. const auto& arr = *input.array();
  348. if (arr.GetNullCount() == arr.length) {
  349. *res = input;
  350. } else {
  351. auto bitmap = CopyBitmap(ctx->memory_pool(), arr.buffers[0], arr.offset, arr.length);
  352. std::shared_ptr<arrow::Buffer> data = ARROW_RESULT(arrow::AllocateBuffer(arr.length, ctx->memory_pool()));;
  353. NegateSparseBitmap(data->mutable_data(), arr.GetValues<ui8>(1), arr.length);
  354. *res = arrow::ArrayData::Make(arr.type, arr.length, { bitmap, data });
  355. }
  356. return arrow::Status::OK();
  357. }
  358. };
  359. template <typename TExec>
  360. std::shared_ptr<arrow::compute::ScalarKernel> MakeKernel(const TVector<TType*>& argTypes, TType* resultType) {
  361. std::shared_ptr<arrow::DataType> returnArrowType;
  362. MKQL_ENSURE(ConvertArrowType(AS_TYPE(TBlockType, resultType)->GetItemType(), returnArrowType), "Unsupported arrow type");
  363. auto exec = std::make_shared<TExec>();
  364. auto kernel = std::make_shared<arrow::compute::ScalarKernel>(ConvertToInputTypes(argTypes), ConvertToOutputType(resultType),
  365. [exec](arrow::compute::KernelContext* ctx, const arrow::compute::ExecBatch& batch, arrow::Datum* res) {
  366. return exec->Exec(ctx, batch, res);
  367. });
  368. kernel->null_handling = arrow::compute::NullHandling::COMPUTED_NO_PREALLOCATE;
  369. return kernel;
  370. }
  371. IComputationNode* WrapBlockLogical(std::string_view name, TCallable& callable, const TComputationNodeFactoryContext& ctx) {
  372. MKQL_ENSURE(callable.GetInputsCount() == 2, "Expected 2 args");
  373. auto firstType = AS_TYPE(TBlockType, callable.GetInput(0).GetStaticType());
  374. auto secondType = AS_TYPE(TBlockType, callable.GetInput(1).GetStaticType());
  375. bool isOpt1, isOpt2;
  376. MKQL_ENSURE(UnpackOptionalData(firstType->GetItemType(), isOpt1)->GetSchemeType() == NUdf::TDataType<bool>::Id,
  377. "Requires boolean args.");
  378. MKQL_ENSURE(UnpackOptionalData(secondType->GetItemType(), isOpt2)->GetSchemeType() == NUdf::TDataType<bool>::Id,
  379. "Requires boolean args.");
  380. auto compute1 = LocateNode(ctx.NodeLocator, callable, 0);
  381. auto compute2 = LocateNode(ctx.NodeLocator, callable, 1);
  382. TComputationNodePtrVector argsNodes = { compute1, compute2 };
  383. TVector<TType*> argsTypes = { callable.GetInput(0).GetStaticType(), callable.GetInput(1).GetStaticType() };
  384. std::shared_ptr<arrow::compute::ScalarKernel> kernel;
  385. if (name == "And") {
  386. kernel = MakeKernel<TAndBlockExec>(argsTypes, callable.GetType()->GetReturnType());
  387. } else if (name == "Or") {
  388. kernel = MakeKernel<TOrBlockExec>(argsTypes, callable.GetType()->GetReturnType());
  389. } else {
  390. kernel = MakeKernel<TXorBlockExec>(argsTypes, callable.GetType()->GetReturnType());
  391. }
  392. return new TBlockFuncNode(ctx.Mutables, name, std::move(argsNodes), argsTypes, *kernel, kernel);
  393. }
  394. } // namespace
  395. IComputationNode* WrapBlockAnd(TCallable& callable, const TComputationNodeFactoryContext& ctx) {
  396. return WrapBlockLogical("And", callable, ctx);
  397. }
  398. IComputationNode* WrapBlockOr(TCallable& callable, const TComputationNodeFactoryContext& ctx) {
  399. return WrapBlockLogical("Or", callable, ctx);
  400. }
  401. IComputationNode* WrapBlockXor(TCallable& callable, const TComputationNodeFactoryContext& ctx) {
  402. return WrapBlockLogical("Xor", callable, ctx);
  403. }
  404. IComputationNode* WrapBlockNot(TCallable& callable, const TComputationNodeFactoryContext& ctx) {
  405. MKQL_ENSURE(callable.GetInputsCount() == 1, "Expected 1 arg");
  406. auto dataType = AS_TYPE(TBlockType, callable.GetInput(0).GetStaticType());
  407. bool isOpt;
  408. MKQL_ENSURE(UnpackOptionalData(dataType->GetItemType(), isOpt)->GetSchemeType() == NUdf::TDataType<bool>::Id,
  409. "Requires boolean args.");
  410. auto compute = LocateNode(ctx.NodeLocator, callable, 0);
  411. TComputationNodePtrVector argsNodes = { compute };
  412. TVector<TType*> argsTypes = { callable.GetInput(0).GetStaticType() };
  413. auto kernel = MakeKernel<TNotBlockExec>(argsTypes, argsTypes[0]);
  414. return new TBlockFuncNode(ctx.Mutables, "Not", std::move(argsNodes), argsTypes, *kernel, kernel);
  415. }
  416. }
  417. }