001/**
002 * Copyright (C) 2006-2025 Talend Inc. - www.talend.com
003 *
004 * Licensed under the Apache License, Version 2.0 (the "License");
005 * you may not use this file except in compliance with the License.
006 * You may obtain a copy of the License at
007 *
008 * http://www.apache.org/licenses/LICENSE-2.0
009 *
010 * Unless required by applicable law or agreed to in writing, software
011 * distributed under the License is distributed on an "AS IS" BASIS,
012 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
013 * See the License for the specific language governing permissions and
014 * limitations under the License.
015 */
016package org.talend.sdk.component.runtime.manager.chain.internal;
017
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
import static java.util.stream.Collectors.toSet;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.InvocationTargetException;
import java.nio.charset.StandardCharsets;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;

import javax.json.bind.Jsonb;
import javax.json.bind.JsonbBuilder;
import javax.json.bind.JsonbConfig;

import org.talend.sdk.component.api.processor.OutputEmitter;
import org.talend.sdk.component.api.record.Record;
import org.talend.sdk.component.api.service.record.RecordBuilderFactory;
import org.talend.sdk.component.runtime.base.Lifecycle;
import org.talend.sdk.component.runtime.input.CheckpointState;
import org.talend.sdk.component.runtime.input.Input;
import org.talend.sdk.component.runtime.input.Mapper;
import org.talend.sdk.component.runtime.manager.ComponentManager;
import org.talend.sdk.component.runtime.manager.chain.AutoChunkProcessor;
import org.talend.sdk.component.runtime.manager.chain.ChainedMapper;
import org.talend.sdk.component.runtime.manager.chain.GroupKeyProvider;
import org.talend.sdk.component.runtime.manager.chain.Job;
import org.talend.sdk.component.runtime.output.InputFactory;
import org.talend.sdk.component.runtime.output.OutputFactory;
import org.talend.sdk.component.runtime.output.Processor;
import org.talend.sdk.component.runtime.output.ProcessorImpl;
import org.talend.sdk.component.runtime.record.RecordBuilderFactoryImpl;
import org.talend.sdk.component.runtime.record.RecordConverters;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
075
076public class JobImpl implements Job {
077
078    public static class NodeBuilderImpl implements NodeBuilder {
079
080        private final List<Component> nodes = new ArrayList<>();
081
082        private final Map<String, Map<String, Object>> properties = new HashMap<>();
083
084        @Override
085        public NodeBuilder property(final String name, final Object value) {
086            final Component lastComponent = nodes.get(nodes.size() - 1);
087            properties.computeIfAbsent(lastComponent.getId(), s -> new HashMap<>());
088            properties.get(lastComponent.getId()).put(name, value);
089            return this;
090        }
091
092        @Override
093        public NodeBuilder checkpoint(final Consumer<CheckpointState> checkpoint) {
094            final Component lastComponent = nodes.get(nodes.size() - 1);
095            lastComponent.setCheckpointCallback(checkpoint);
096            return this;
097        }
098
099        @Override
100        public NodeBuilder component(final String id, final String uri) {
101            nodes.add(new Component(id, DSLParser.parse(uri)));
102            return this;
103        }
104
105        @Override
106        public LinkBuilder connections() {
107            return new LinkBuilder(nodes, properties);
108        }
109
110    }
111
    /**
     * Connection-declaration phase of the job DSL: records the edges between the declared
     * components, then {@link #build()} orders the graph into execution "levels"
     * (level 0 = components with no incoming edge, and so on).
     */
    @Slf4j
    @RequiredArgsConstructor
    public static class LinkBuilder implements Job.FromBuilder, Builder {

        // all declared components, shared with the NodeBuilder that created this instance
        private final List<Component> nodes;

        // per component id properties, forwarded as-is to the executor
        private final Map<String, Map<String, Object>> properties;

        private final List<Edge> edges = new ArrayList<>();

        // execution order -> components of that level; TreeMap so iteration follows the order
        private final Map<Integer, Set<Component>> levels = new TreeMap<>();

        /**
         * Starts an edge from the given component/branch.
         *
         * @param id id of an already declared component.
         * @param branch output branch of that component.
         * @return the builder used to pick the edge target.
         * @throws IllegalStateException if the component is unknown or the branch already connected.
         */
        @Override
        public ToBuilder from(final String id, final String branch) {
            final Component from = nodes
                    .stream()
                    .filter(node -> node.getId().equals(id))
                    .findFirst()
                    .orElseThrow(
                            () -> new IllegalStateException("No component with id '" + id + "' in created components"));

            // a given (component, branch) output may only feed one edge
            edges
                    .stream()
                    .filter(edge -> edge.getFrom().getNode().getId().equals(id)
                            && edge.getFrom().getBranch().equals(branch))
                    .findFirst()
                    .ifPresent(edge -> {
                        throw new IllegalStateException(
                                "(" + id + "," + branch + ") node is already connected : " + edge);
                    });

            return new To(nodes, edges, new Connection(from, branch), this);
        }

        /**
         * Finalizes the graph: drops orphan components, marks sources (no incoming edge)
         * and computes the execution levels.
         */
        public void doBuild() {
            final List<Component> orphans = nodes
                    .stream()
                    .filter(n -> edges
                            .stream()
                            .noneMatch(l -> l.getFrom().getNode().equals(n) || l.getTo().getNode().equals(n)))
                    .collect(toList());
            orphans.forEach(o -> log.warn("component '" + o + "' is orphan in this graph. it will be ignored."));
            nodes.removeAll(orphans);

            // set up sources
            nodes
                    .stream()
                    .filter(node -> edges.stream().noneMatch(l -> l.getTo().getNode().equals(node)))
                    .forEach(component -> component.setSource(true));
            // copies are passed because the recursion mutates both collections
            calculateGraphOrder(0, new HashSet<>(nodes), new ArrayList<>(edges), levels);
        }

        /**
         * Recursive topological leveling: at each step the components with no remaining incoming
         * edge form the next level; their outgoing edges and themselves are removed before recursing.
         * NOTE: both {@code nodes} and {@code edges} are mutated by this method.
         *
         * @param order level being computed (0-based).
         * @param nodes components not yet assigned to a level.
         * @param edges edges not yet consumed.
         * @param orderedGraph output map, filled level by level.
         * @throws IllegalStateException if the graph has no starting node or contains a cycle.
         */
        private void calculateGraphOrder(final int order, final Set<Component> nodes, final List<Edge> edges,
                final Map<Integer, Set<Component>> orderedGraph) {
            if (edges.isEmpty()) {
                orderedGraph.put(order, nodes); // last nodes
                return;
            }
            final Set<Component> startingNodes = nodes
                    .stream()
                    .filter(node -> edges.stream().noneMatch(l -> l.getTo().getNode().equals(node)))
                    .collect(toSet());
            if (order == 0 && startingNodes.isEmpty()) {
                throw new IllegalStateException("There is no starting component in this graph.");
            }
            // keep only the edges whose target is fully fed by starting nodes,
            // so a component enters a level only once all its inputs are ready
            final List<Edge> level = edges
                    .stream()
                    .filter(edge -> startingNodes.contains(edge.getFrom().getNode()))
                    .filter(edge -> edges
                            .stream()
                            .filter(others -> edge.getTo().getNode().equals(others.getTo().getNode()))
                            .map(others -> others.getFrom().getNode())
                            .allMatch(startingNodes::contains))
                    .collect(toList());
            if (level.isEmpty()) {
                // edges remain but no progress is possible -> cycle
                throw new IllegalStateException("the job pipeline has cyclic connection");
            }
            final Set<Component> components = level.stream().map(edge -> edge.getFrom().getNode()).collect(toSet());
            orderedGraph.put(order, components);
            edges.removeAll(level);
            nodes.removeAll(components);
            calculateGraphOrder(order + 1, nodes, edges, orderedGraph);
        }

        /**
         * Builds the executor for the declared graph.
         *
         * @return the executor holding the leveled graph, edges and component properties.
         */
        @Override
        public JobExecutor build() {
            doBuild();
            return new JobExecutor(levels, edges, properties);
        }
    }
202
203    @RequiredArgsConstructor
204    private static class To implements ToBuilder {
205
206        private final List<Component> nodes;
207
208        private final List<Edge> edges;
209
210        private final Connection from;
211
212        private final Builder builder;
213
214        @Override
215        public Builder to(final String id, final String branch) {
216            final Component to = nodes
217                    .stream()
218                    .filter(node -> node.getId().equals(id))
219                    .findFirst()
220                    .orElseThrow(() -> new IllegalStateException("No component with id '" + id + "' in created nodes"));
221
222            edges
223                    .stream()
224                    .filter(edge -> edge.getTo().getNode().getId().equals(id)
225                            && edge.getTo().getBranch().equals(branch))
226                    .findFirst()
227                    .ifPresent(edge -> {
228                        throw new IllegalStateException(
229                                "(" + id + "," + branch + ") node is already connected : " + edge);
230                    });
231            edges.add(new Edge(from, new Connection(to, branch)));
232            return builder;
233        }
234    }
235
236    @Getter
237    @Slf4j
238    @RequiredArgsConstructor
239    public static class JobExecutor implements Job.ExecutorBuilder {
240
241        private final Map<Integer, Set<Component>> levels;
242
243        private final List<Edge> edges;
244
245        private final Map<String, Map<String, Object>> componentProperties;
246
247        private final Map<String, Object> jobProperties = new HashMap<>();
248
249        private final ComponentManager manager = ComponentManager.instance();
250
251        @Override
252        public ExecutorBuilder property(final String name, final Object value) {
253            jobProperties.put(name, value);
254            return this;
255        }
256
257        @Override
258        public void run() {
259            ExecutorBuilder runner = this;
260            final Object o = jobProperties.get(ExecutorBuilder.class.getName());
261            if (ExecutorBuilder.class.isInstance(o)) {
262                runner = ExecutorBuilder.class.cast(o);
263            } else if (Class.class.isInstance(o)) {
264                runner = newRunner(Class.class.cast(o));
265            } else if (String.class.isInstance(o)) {
266                final String name = String.class.cast(o).trim();
267                if (!"standalone".equalsIgnoreCase(name) && !"default".equalsIgnoreCase(name)
268                        && !"local".equalsIgnoreCase(name)) {
269                    if ("beam".equalsIgnoreCase(name)) {
270                        try {
271                            runner = newRunner(Thread.currentThread().getContextClassLoader(),
272                                    "org.talend.sdk.component.runtime.beam.chain.impl.BeamExecutor");
273                        } catch (final RuntimeException re) {
274                            log
275                                    .error("Can't instantiate beam job integration, "
276                                            + "did you add org.talend.sdk.component:component-runtime-beam in your dependencies",
277                                            re);
278                        }
279                    } else {
280                        runner = newRunner(Thread.currentThread().getContextClassLoader(), name);
281                    }
282                }
283            } else if (o != null) {
284                throw new IllegalArgumentException(o + " is not an ExecutionBuilder");
285            } else {
286                final ClassLoader loader = Thread.currentThread().getContextClassLoader();
287                try (final InputStream stream =
288                        loader.getResourceAsStream("META-INF/services/" + ExecutorBuilder.class.getName())) {
289                    if (stream != null) {
290                        runner = new BufferedReader(new InputStreamReader(stream))
291                                .lines()
292                                .map(String::trim)
293                                .filter(s -> !s.startsWith("#") && !s.isEmpty())
294                                .findFirst()
295                                .map(clazz -> newRunner(loader, clazz))
296                                .orElse(this);
297                    }
298                } catch (final IOException e) {
299                    log.debug(e.getMessage(), e);
300                }
301            }
302
303            if (runner == this) {
304                JobExecutor.class.cast(runner).localRun();
305            } else {
306                runner.run();
307            }
308        }
309
310        private ExecutorBuilder newRunner(final ClassLoader loader, final String clazz) {
311            try {
312                final Class<? extends ExecutorBuilder> aClass =
313                        (Class<? extends ExecutorBuilder>) loader.loadClass(clazz);
314                return newRunner(aClass);
315            } catch (final ClassNotFoundException e) {
316                throw new IllegalArgumentException(e);
317            }
318        }
319
320        private ExecutorBuilder newRunner(final Class<? extends ExecutorBuilder> runnerType) {
321            try {
322                try {
323                    return runnerType.getConstructor(JobExecutor.class).newInstance(JobExecutor.this);
324                } catch (final NoSuchMethodException e) {
325                    return runnerType.getConstructor().newInstance();
326                }
327            } catch (final NoSuchMethodException | InstantiationException | IllegalAccessException e1) {
328                throw new IllegalArgumentException(e1);
329            } catch (InvocationTargetException e1) {
330                throw new IllegalArgumentException(e1.getTargetException());
331            }
332        }
333
334        private void localRun() {
335            final long maxRecords =
336                    Long.parseLong(String.valueOf(getJobProperties().getOrDefault("streaming.maxRecords", "-1")));
337            final Map<String, InputRunner> inputs =
338                    levels.values().stream().flatMap(Collection::stream).filter(Component::isSource).map(n -> {
339                        final Mapper mapper = manager
340                                .findMapper(n.getNode().getFamily(), n.getNode().getComponent(),
341                                        n.getNode().getVersion(), n.getNode().getConfiguration())
342                                .orElseThrow(() -> new IllegalStateException("No mapper found for: " + n.getNode()));
343                        return new AbstractMap.SimpleEntry<>(n.getId(), new InputRunner(mapper, maxRecords,
344                                n.getCheckpointCallback()));
345                    }).collect(toMap(AbstractMap.SimpleEntry::getKey, AbstractMap.SimpleEntry::getValue));
346
347            final Map<String, AutoChunkProcessor> processors = levels
348                    .values()
349                    .stream()
350                    .flatMap(Collection::stream)
351                    .filter(component -> !component.isSource())
352                    .map(component -> {
353                        final Processor processor = manager
354                                .findProcessor(component.getNode().getFamily(), component.getNode().getComponent(),
355                                        component.getNode().getVersion(), component.getNode().getConfiguration())
356                                .orElseThrow(() -> new IllegalStateException(
357                                        "No processor found for:" + component.getNode()));
358                        final AtomicInteger maxBatchSize = new AtomicInteger(1);
359                        if (ProcessorImpl.class.isInstance(processor)) {
360                            ProcessorImpl.class
361                                    .cast(processor)
362                                    .getInternalConfiguration()
363                                    .entrySet()
364                                    .stream()
365                                    .filter(it -> it.getKey().endsWith("$maxBatchSize") && it.getValue() != null
366                                            && !it.getValue().trim().isEmpty())
367                                    .findFirst()
368                                    .ifPresent(val -> {
369                                        try {
370                                            maxBatchSize.set(Integer.parseInt(val.getValue().trim()));
371                                        } catch (final NumberFormatException nfe) {
372                                            throw new IllegalArgumentException("Invalid configuratoin: " + val);
373                                        }
374                                    });
375                        }
376                        return new AbstractMap.SimpleEntry<>(component.getId(),
377                                new AutoChunkProcessor(maxBatchSize.get(), processor));
378                    })
379                    .collect(toMap(AbstractMap.SimpleEntry::getKey, AbstractMap.SimpleEntry::getValue));
380
381            final RecordConverters.MappingMetaRegistry registry = new RecordConverters.MappingMetaRegistry();
382            final AtomicReference<DataOutputFactory> outs = new AtomicReference<>();
383            try {
384                final Map<String, AtomicBoolean> sourcesWithData = levels
385                        .values()
386                        .stream()
387                        .flatMap(Collection::stream)
388                        .filter(Component::isSource)
389                        .map(component -> new AbstractMap.SimpleEntry<>(component.getId(), new AtomicBoolean(true)))
390                        .collect(toMap(AbstractMap.SimpleEntry::getKey, AbstractMap.SimpleEntry::getValue));
391                processors.values().forEach(Lifecycle::start); // start processor
392                final Map<String, Map<String, Map<String, Collection<Record>>>> flowData = new HashMap<>();
393                final AtomicBoolean running = new AtomicBoolean(true);
394                do {
395                    levels.forEach((level, components) -> components.forEach((Component component) -> {
396                        if (component.isSource()) {
397                            final InputRunner source = inputs.get(component.getId());
398                            final Record data = source.next();
399                            if (data == null) {
400                                sourcesWithData.get(component.getId()).set(false);
401                                return;
402                            }
403                            final String key = getKeyProvider(component.getId())
404                                    .apply(new GroupContextImpl(data, component.getId(), "__default__"));
405                            flowData.computeIfAbsent(component.getId(), s -> new HashMap<>());
406                            flowData.get(component.getId()).computeIfAbsent("__default__", s -> new TreeMap<>());
407                            flowData
408                                    .get(component.getId())
409                                    .get("__default__")
410                                    .computeIfAbsent(key, k -> new ArrayList<>())
411                                    .add(data);
412                        } else {
413                            final List<Edge> connections =
414                                    getConnections(getEdges(), component, e -> e.getTo().getNode());
415                            final DataInputFactory dataInputFactory = new DataInputFactory();
416                            if (connections.size() == 1) {
417                                final Edge edge = connections.get(0);
418                                final String fromId = edge.getFrom().getNode().getId();
419                                final String fromBranch = edge.getFrom().getBranch();
420                                final String toBranch = edge.getTo().getBranch();
421
422                                final Map<String, Map<String, Collection<Record>>> idData = flowData.get(fromId);
423                                final Record data = idData == null ? null : pollFirst(idData.get(fromBranch));
424                                if (data != null) {
425                                    dataInputFactory.withInput(toBranch, singletonList(data));
426                                }
427                            } else { // need grouping
428                                final Map<String, Map<String, Collection<Record>>> availableDataForStep =
429                                        new HashMap<>();
430                                connections.forEach(edge -> {
431                                    final String fromId = edge.getFrom().getNode().getId();
432                                    final String fromBranch = edge.getFrom().getBranch();
433                                    final String toBranch = edge.getTo().getBranch();
434                                    final Map<String, Collection<Record>> data =
435                                            flowData.get(fromId) == null ? null : flowData.get(fromId).get(fromBranch);
436                                    if (data != null && !data.isEmpty()) {
437                                        availableDataForStep.put(toBranch, data);
438                                    }
439                                });
440
441                                final Map<String, String> joined = joinWithFusionSort(availableDataForStep);
442                                if (!joined.isEmpty() && connections.size() == joined.size()) {
443                                    joined.forEach((k, v) -> {
444                                        final Collection data = availableDataForStep.get(k).remove(v);
445                                        dataInputFactory.withInput(k, data);
446                                    });
447                                }
448                            }
449                            if (dataInputFactory.inputs.isEmpty()) {
450                                if (level.equals(levels.size() - 1)
451                                        && sourcesWithData.entrySet().stream().noneMatch(e -> e.getValue().get())) {
452                                    running.set(false);
453                                }
454                                return;
455                            }
456                            final AutoChunkProcessor processor = processors.get(component.getId());
457
458                            final DataOutputFactory dataOutputFactory = new DataOutputFactory(getManager()
459                                    .findPlugin(processor.plugin())
460                                    .get()
461                                    .get(ComponentManager.AllServices.class)
462                                    .getServices(), registry);
463                            processor.onElement(dataInputFactory, dataOutputFactory);
464                            dataOutputFactory.getOutputs().forEach((branch, data) -> data.forEach(item -> {
465                                final String key = getKeyProvider(component.getId())
466                                        .apply(new GroupContextImpl(item, component.getId(), branch));
467                                flowData.computeIfAbsent(component.getId(), s -> new HashMap<>());
468                                flowData.get(component.getId()).computeIfAbsent(branch, s -> new TreeMap<>());
469                                flowData
470                                        .get(component.getId())
471                                        .get(branch)
472                                        .computeIfAbsent(key, k -> new ArrayList<>())
473                                        .add(item);
474                            }));
475                            outs.set(dataOutputFactory);
476                        }
477                    }));
478                } while (running.get());
479            } finally {
480                if (outs.get() != null) {
481                    processors.values().forEach(p -> p.flush(outs.get()));
482                }
483                processors.values().forEach(Lifecycle::stop);
484                inputs.values().forEach(InputRunner::stop);
485                levels
486                        .values()
487                        .stream()
488                        .flatMap(Collection::stream)
489                        .map(Component::getId)
490                        .forEach(LocalSequenceHolder::clean);
491            }
492        }
493
494        private Map<String, String>
495                joinWithFusionSort(final Map<String, Map<String, Collection<Record>>> dataByBranch) {
496            final Map<String, String> join = new HashMap<>();
497            dataByBranch.forEach((branch1, records1) -> {
498                dataByBranch.forEach((branch2, records2) -> {
499                    if (!branch1.equals(branch2)) {
500                        for (final String key1 : records1.keySet()) {
501                            for (final String key2 : records2.keySet()) {
502                                if (key1.equals(key2)) {
503                                    join.putIfAbsent(branch1, key1);
504                                    join.putIfAbsent(branch2, key2);
505                                } else if (key1.compareTo(key2) < 0) {
506                                    break;// see fusion sort
507                                }
508                            }
509                        }
510                    }
511                });
512            });
513            return join;
514        }
515
516        private Record pollFirst(final Map<String, Collection<Record>> data) {
517            if (data == null || data.isEmpty()) {
518                return null;
519            }
520            while (!data.isEmpty()) {
521                final String key = data.keySet().iterator().next();
522                final Collection<Record> items = data.get(key);
523                if (!items.isEmpty()) {
524                    final Iterator<Record> iterator = items.iterator();
525                    final Record item = iterator.next();
526                    iterator.remove();
527                    return item;
528                } else {
529                    data.remove(key);
530                }
531            }
532            return null;
533        }
534
535        private List<Job.Edge> getConnections(final List<Job.Edge> edges, final Job.Component step,
536                final Function<Edge, Component> direction) {
537            return edges.stream().filter(edge -> direction.apply(edge).equals(step)).collect(toList());
538        }
539
540        public GroupKeyProvider getKeyProvider(final String componentId) {
541            if (componentProperties.get(componentId) != null) {
542                final Object o = componentProperties.get(componentId).get(GroupKeyProvider.class.getName());
543                if (GroupKeyProvider.class.isInstance(o)) {
544                    return new GroupKeyProviderImpl(GroupKeyProvider.class.cast(o));
545                }
546            }
547
548            final Object o = jobProperties.get(GroupKeyProvider.class.getName());
549            if (GroupKeyProvider.class.isInstance(o)) {
550                return new GroupKeyProviderImpl(GroupKeyProvider.class.cast(o));
551            }
552
553            final ServiceLoader<GroupKeyProvider> services = ServiceLoader.load(GroupKeyProvider.class);
554            if (services.iterator().hasNext()) {
555                return services.iterator().next();
556            }
557
558            return LocalSequenceHolder.cleanAndGet(componentId);
559        }
560    }
561
    /**
     * Plain {@link GroupKeyProvider.GroupContext} implementation carrying a record together
     * with the component and branch it was emitted on, used to compute its grouping key.
     */
    @Data
    private static class GroupContextImpl implements GroupKeyProvider.GroupContext {

        // the record a grouping key is computed for
        private final Record data;

        // id of the component which emitted the record
        private final String componentId;

        // output branch the record was emitted on
        private final String branchName;
    }
571
572    public static class LocalSequenceHolder {
573
574        private static final Map<String, AtomicLong> GENERATORS = new HashMap<>();
575
576        public static GroupKeyProvider cleanAndGet(final String name) {
577            GENERATORS.put(name, new AtomicLong(0));
578            return c -> Long.toString(GENERATORS.get(name).incrementAndGet());
579        }
580
581        public static void clean(final String name) {
582            GENERATORS.remove(name);
583        }
584    }
585
    /**
     * Wraps a {@link Mapper} into a ready-to-poll record source: splits the mapper, chains the
     * splits, creates and starts the input, and enforces an optional record cap. Lifecycle errors
     * are carefully ordered so the first failure wins and secondary ones are only logged.
     */
    @Slf4j
    private static class InputRunner {

        private final Mapper chainedMapper;

        private final Input input;

        // maximum records to emit; <= 0 means unbounded
        private final long maxRecords;

        private long currentRecords;

        /**
         * Builds and starts the input chain for the given mapper.
         *
         * @param mapper root mapper, started/split here and always stopped before returning.
         * @param maxRecords record cap, <= 0 for unbounded.
         * @param checkpointCallback optional checkpoint consumer; when present the input is
         *        started with checkpoint support.
         */
        private InputRunner(final Mapper mapper, final long maxRecords,
                final Consumer<CheckpointState> checkpointCallback) {
            this.maxRecords = maxRecords;
            RuntimeException error = null;
            try {
                mapper.start();
                chainedMapper = new ChainedMapper(mapper, mapper.split(mapper.assess()).iterator());
                chainedMapper.start();
                input = chainedMapper.create();
                if (checkpointCallback == null) {
                    input.start();
                } else {
                    input.start(checkpointCallback);
                }
            } catch (final RuntimeException re) {
                error = re;
                throw re;
            } finally {
                try {
                    mapper.stop();
                } catch (final RuntimeException re) {
                    // only propagate the stop failure if nothing failed before,
                    // otherwise keep the original error and just log this one
                    if (error == null) {
                        throw re;
                    }
                    log.error(re.getMessage(), re);
                }
            }
        }

        /**
         * Polls the next record.
         *
         * @return the next record, or null when the cap is reached or the input is exhausted.
         */
        public Record next() {
            if (maxRecords > 0 && currentRecords >= maxRecords) {
                return null;
            }
            final Object next = input.next();
            if (next == null) {
                return null;
            }
            currentRecords++;
            return Record.class.cast(next);
        }

        /**
         * Stops the input then the chained mapper; the first failure is propagated and a
         * secondary failure while stopping the mapper is only logged.
         */
        public void stop() {
            RuntimeException error = null;
            try {
                if (input != null) {
                    input.stop();
                }
            } catch (final RuntimeException re) {
                error = re;
                throw re;
            } finally {
                try {
                    if (chainedMapper != null) {
                        chainedMapper.stop();
                    }
                } catch (final RuntimeException re) {
                    if (error == null) {
                        throw re;
                    }
                    log.error(re.getMessage(), re);
                }
            }
        }
    }
661
662    @Data
663    private static class DataOutputFactory implements OutputFactory {
664
665        private final Map<Class<?>, Object> services;
666
667        private final RecordConverters.MappingMetaRegistry registry;
668
669        private final Map<String, Collection<Record>> outputs = new HashMap<>();
670
671        @Override
672        public OutputEmitter create(final String name) {
673            return new OutputEmitterImpl(name, registry);
674        }
675
676        @AllArgsConstructor
677        private class OutputEmitterImpl implements OutputEmitter {
678
679            private final String name;
680
681            private final RecordConverters.MappingMetaRegistry registry;
682
683            @Override
684            public void emit(final Object value) {
685                outputs
686                        .computeIfAbsent(name, k -> new ArrayList<>())
687                        .add(new RecordConverters()
688                                .toRecord(registry, value, () -> Jsonb.class.cast(services.get(Jsonb.class)),
689                                        () -> RecordBuilderFactory.class
690                                                .cast(services.get(RecordBuilderFactory.class))));
691            }
692        }
693    }
694
    /**
     * Feeds a processor from in-memory collections, one iterator per input branch.
     *
     * Non-{@link Record} values are converted to records on the fly. The {@link Jsonb},
     * {@link RecordBuilderFactory} and mapping registry are created lazily via
     * double-checked locking on volatile fields, so this factory can be shared across
     * threads — do not restructure those init blocks casually.
     */
    private static class DataInputFactory implements InputFactory {

        // branch name -> remaining data for that branch
        private final Map<String, Iterator<Object>> inputs = new HashMap<>();

        // lazily created, guarded by double-checked locking on `this`
        private volatile Jsonb jsonb;

        private volatile RecordBuilderFactory factory;

        private volatile RecordConverters.MappingMetaRegistry registry;

        /**
         * Registers the data of one branch; returns {@code this} for chaining.
         * Note: the collection is captured as an iterator, so it is consumed once.
         */
        private DataInputFactory withInput(final String branch, final Collection<Object> branchData) {
            inputs.put(branch, branchData.iterator());
            return this;
        }

        /**
         * Returns the next value of the given branch mapped to a record-compatible
         * form, or {@code null} when the branch is unknown or exhausted.
         */
        @Override
        public Object read(final String name) {
            final Iterator<?> iterator = inputs.get(name);
            if (iterator != null && iterator.hasNext()) {
                return map(iterator.next());
            }
            return null;
        }

        /**
         * Converts a raw value to something the processor can consume: records pass
         * through untouched, primitive-like values are returned as-is, and POJOs are
         * converted to records.
         */
        private Object map(final Object next) {
            if (next == null || Record.class.isInstance(next)) {
                return next;
            }

            final String str = jsonb().get().toJson(next);
            // primitives mainly, not that accurate in main code but for now not forbidden
            if (str.equals(next.toString())) {
                return next;
            }
            // lazy init (double-checked locking, `registry` is volatile)
            if (registry == null) {
                synchronized (this) {
                    if (registry == null) {
                        registry = new RecordConverters.MappingMetaRegistry();
                    }
                }
            }
            // pojo
            return new RecordConverters().toRecord(registry, next, jsonb(), () -> {
                // lazy init (double-checked locking, `factory` is volatile)
                if (factory == null) {
                    synchronized (this) {
                        if (factory == null) {
                            factory = new RecordBuilderFactoryImpl("test");
                        }
                    }
                }
                return factory;
            });
        }

        /**
         * Supplier deferring the {@link Jsonb} creation until first use
         * (double-checked locking, `jsonb` is volatile). CDI integration is disabled
         * since this factory runs outside a container.
         */
        private Supplier<Jsonb> jsonb() {
            return () -> {
                if (jsonb == null) {
                    synchronized (this) {
                        if (jsonb == null) {
                            jsonb = JsonbBuilder.create(new JsonbConfig().setProperty("johnzon.cdi.activated", false));
                        }
                    }
                }
                return jsonb;
            };
        }
    }
762
763    @AllArgsConstructor
764    protected static class GroupKeyProviderImpl implements GroupKeyProvider {
765
766        private final GroupKeyProvider delegate;
767
768        @Override
769        public String apply(final GroupKeyProvider.GroupContext context) {
770            return delegate.apply(context);
771        }
772    }
773}