Commit 8306ad45 authored by 彭勇升 pengys's avatar 彭勇升 pengys Committed by 吴晟
Browse files

Reduce the number of threads to improve performance. (#3133)

* Refactor Persistence worker.

* 1. Provide InsertRequest and UpdateRequest interface for prepare persistence.
2. Implement the ids query for H2 metrics DAO.

* Refactor worker framework

* Use queue to receive asynchronous batch request.

* Rename the DataCarrier thread name.

* Fixed some mistakes.

* New mistake.
parent ab9bc922
Loading
Loading
Loading
Loading
+9 −36
Original line number Diff line number Diff line
@@ -28,6 +28,7 @@ import org.apache.skywalking.oap.server.core.exporter.ExportEvent;
import org.apache.skywalking.oap.server.core.storage.IMetricsDAO;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.core.worker.AbstractWorker;
import org.apache.skywalking.oap.server.library.client.request.PrepareRequest;
import org.apache.skywalking.oap.server.library.module.ModuleDefineHolder;
import org.slf4j.*;

@@ -45,10 +46,9 @@ public class MetricsPersistentWorker extends PersistenceWorker<Metrics, MergeDat
    private final AbstractWorker<ExportEvent> nextExportWorker;
    private final DataCarrier<Metrics> dataCarrier;

    MetricsPersistentWorker(ModuleDefineHolder moduleDefineHolder, Model model, int batchSize,
        IMetricsDAO metricsDAO, AbstractWorker<Metrics> nextAlarmWorker,
    MetricsPersistentWorker(ModuleDefineHolder moduleDefineHolder, Model model, IMetricsDAO metricsDAO, AbstractWorker<Metrics> nextAlarmWorker,
        AbstractWorker<ExportEvent> nextExportWorker) {
        super(moduleDefineHolder, batchSize);
        super(moduleDefineHolder);
        this.model = model;
        this.mergeDataCache = new MergeDataCache<>();
        this.metricsDAO = metricsDAO;
@@ -76,7 +76,6 @@ public class MetricsPersistentWorker extends PersistenceWorker<Metrics, MergeDat
    }

    @Override public void in(Metrics metrics) {
        metrics.resetEndOfBatch();
        dataCarrier.produce(metrics);
    }

@@ -84,23 +83,9 @@ public class MetricsPersistentWorker extends PersistenceWorker<Metrics, MergeDat
        return mergeDataCache;
    }

    public boolean flushAndSwitch() {
        boolean isSwitch;
        try {
            if (isSwitch = getCache().trySwitchPointer()) {
                getCache().switchPointer();
            }
        } finally {
            getCache().trySwitchPointerFinally();
        }
        return isSwitch;
    }

    @Override public List<Object> prepareBatch(MergeDataCache<Metrics> cache) {
    @Override public void prepareBatch(MergeDataCache<Metrics> cache, List<PrepareRequest> prepareRequests) {
        long start = System.currentTimeMillis();

        List<Object> batchCollection = new LinkedList<>();

        Collection<Metrics> collection = cache.getLast().collection();

        int i = 0;
@@ -131,9 +116,9 @@ public class MetricsPersistentWorker extends PersistenceWorker<Metrics, MergeDat
                        if (dbMetricsMap.containsKey(metric.id())) {
                            metric.combine(dbMetricsMap.get(metric.id()));
                            metric.calculate();
                            batchCollection.add(metricsDAO.prepareBatchUpdate(model, metric));
                            prepareRequests.add(metricsDAO.prepareBatchUpdate(model, metric));
                        } else {
                            batchCollection.add(metricsDAO.prepareBatchInsert(model, metric));
                            prepareRequests.add(metricsDAO.prepareBatchInsert(model, metric));
                        }

                        if (Objects.nonNull(nextAlarmWorker)) {
@@ -152,11 +137,9 @@ public class MetricsPersistentWorker extends PersistenceWorker<Metrics, MergeDat
            i++;
        }

        if (batchCollection.size() > 0) {
            logger.debug("prepareBatch model {}, took time: {}", model.getName(), System.currentTimeMillis() - start);
        if (prepareRequests.size() > 0) {
            logger.debug("prepare batch requests for model {}, took time: {}", model.getName(), System.currentTimeMillis() - start);
        }

        return batchCollection;
    }

    @Override public void cacheData(Metrics input) {
@@ -186,17 +169,7 @@ public class MetricsPersistentWorker extends PersistenceWorker<Metrics, MergeDat
        }

        @Override public void consume(List<Metrics> data) {
            Iterator<Metrics> inputIterator = data.iterator();

            int i = 0;
            while (inputIterator.hasNext()) {
                Metrics metrics = inputIterator.next();
                i++;
                if (i == data.size()) {
                    metrics.asEndOfBatch();
                }
                persistent.onWork(metrics);
            }
            data.forEach(persistent::onWork);
        }

        @Override public void onError(List<Metrics> data, Throwable t) {
+8 −19
Original line number Diff line number Diff line
@@ -18,25 +18,15 @@

package org.apache.skywalking.oap.server.core.analysis.worker;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;
import lombok.Getter;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.core.UnexpectedException;
import org.apache.skywalking.oap.server.core.analysis.DisableRegister;
import org.apache.skywalking.oap.server.core.analysis.Downsampling;
import org.apache.skywalking.oap.server.core.analysis.Stream;
import org.apache.skywalking.oap.server.core.analysis.StreamProcessor;
import org.apache.skywalking.oap.server.core.*;
import org.apache.skywalking.oap.server.core.analysis.*;
import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
import org.apache.skywalking.oap.server.core.config.DownsamplingConfigService;
import org.apache.skywalking.oap.server.core.storage.IMetricsDAO;
import org.apache.skywalking.oap.server.core.storage.StorageDAO;
import org.apache.skywalking.oap.server.core.storage.StorageModule;
import org.apache.skywalking.oap.server.core.storage.*;
import org.apache.skywalking.oap.server.core.storage.annotation.Storage;
import org.apache.skywalking.oap.server.core.storage.model.IModelSetter;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.core.storage.model.*;
import org.apache.skywalking.oap.server.core.worker.IWorkerInstanceSetter;
import org.apache.skywalking.oap.server.library.module.ModuleDefineHolder;

@@ -61,6 +51,7 @@ public class MetricsStreamProcessor implements StreamProcessor<Metrics> {
        }
    }

    @SuppressWarnings("unchecked")
    public void create(ModuleDefineHolder moduleDefineHolder, Stream stream, Class<? extends Metrics> metricsClass) {
        if (DisableRegister.INSTANCE.include(stream.name())) {
            return;
@@ -114,16 +105,14 @@ public class MetricsStreamProcessor implements StreamProcessor<Metrics> {
        AlarmNotifyWorker alarmNotifyWorker = new AlarmNotifyWorker(moduleDefineHolder);
        ExportWorker exportWorker = new ExportWorker(moduleDefineHolder);

        MetricsPersistentWorker minutePersistentWorker = new MetricsPersistentWorker(moduleDefineHolder, model,
            1000, metricsDAO, alarmNotifyWorker, exportWorker);
        MetricsPersistentWorker minutePersistentWorker = new MetricsPersistentWorker(moduleDefineHolder, model, metricsDAO, alarmNotifyWorker, exportWorker);
        persistentWorkers.add(minutePersistentWorker);

        return minutePersistentWorker;
    }

    private MetricsPersistentWorker worker(ModuleDefineHolder moduleDefineHolder, IMetricsDAO metricsDAO, Model model) {
        MetricsPersistentWorker persistentWorker = new MetricsPersistentWorker(moduleDefineHolder, model,
            1000, metricsDAO, null, null);
        MetricsPersistentWorker persistentWorker = new MetricsPersistentWorker(moduleDefineHolder, model, metricsDAO, null, null);
        persistentWorkers.add(persistentWorker);

        return persistentWorker;
+7 −25
Original line number Diff line number Diff line
@@ -18,10 +18,11 @@

package org.apache.skywalking.oap.server.core.analysis.worker;

import java.util.*;
import java.util.List;
import org.apache.skywalking.oap.server.core.analysis.data.Window;
import org.apache.skywalking.oap.server.core.storage.*;
import org.apache.skywalking.oap.server.core.storage.StorageData;
import org.apache.skywalking.oap.server.core.worker.AbstractWorker;
import org.apache.skywalking.oap.server.library.client.request.PrepareRequest;
import org.apache.skywalking.oap.server.library.module.ModuleDefineHolder;
import org.slf4j.*;

@@ -32,28 +33,11 @@ public abstract class PersistenceWorker<INPUT extends StorageData, CACHE extends

    private static final Logger logger = LoggerFactory.getLogger(PersistenceWorker.class);

    private final int batchSize;
    private final IBatchDAO batchDAO;

    PersistenceWorker(ModuleDefineHolder moduleDefineHolder, int batchSize) {
    PersistenceWorker(ModuleDefineHolder moduleDefineHolder) {
        super(moduleDefineHolder);
        this.batchSize = batchSize;
        this.batchDAO = moduleDefineHolder.find(StorageModule.NAME).provider().getService(IBatchDAO.class);
    }

    void onWork(INPUT input) {
        if (getCache().currentCollectionSize() >= batchSize) {
            try {
                if (getCache().trySwitchPointer()) {
                    getCache().switchPointer();

                    List<?> collection = buildBatchCollection();
                    batchDAO.asynchronous(collection);
                }
            } finally {
                getCache().trySwitchPointerFinally();
            }
        }
        cacheData(input);
    }

@@ -73,10 +57,9 @@ public abstract class PersistenceWorker<INPUT extends StorageData, CACHE extends
        return isSwitch;
    }

    public abstract List<Object> prepareBatch(CACHE cache);
    public abstract void prepareBatch(CACHE cache, List<PrepareRequest> prepareRequests);

    public final List<?> buildBatchCollection() {
        List<?> batchCollection = new LinkedList<>();
    public final void buildBatchRequests(List<PrepareRequest> prepareRequests) {
        try {
            while (getCache().getLast().isWriting()) {
                try {
@@ -87,11 +70,10 @@ public abstract class PersistenceWorker<INPUT extends StorageData, CACHE extends
            }

            if (getCache().getLast().collection() != null) {
                batchCollection = prepareBatch(getCache());
                prepareBatch(getCache(), prepareRequests);
            }
        } finally {
            getCache().finishReadingLast();
        }
        return batchCollection;
    }
}
+14 −72
Original line number Diff line number Diff line
@@ -18,97 +18,39 @@

package org.apache.skywalking.oap.server.core.analysis.worker;

import java.util.*;
import org.apache.skywalking.apm.commons.datacarrier.DataCarrier;
import org.apache.skywalking.apm.commons.datacarrier.consumer.*;
import org.apache.skywalking.oap.server.core.UnexpectedException;
import org.apache.skywalking.oap.server.core.analysis.data.NonMergeDataCache;
import java.io.IOException;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
import org.apache.skywalking.oap.server.core.storage.IRecordDAO;
import org.apache.skywalking.oap.server.core.storage.*;
import org.apache.skywalking.oap.server.core.storage.model.Model;
import org.apache.skywalking.oap.server.core.worker.AbstractWorker;
import org.apache.skywalking.oap.server.library.client.request.InsertRequest;
import org.apache.skywalking.oap.server.library.module.ModuleDefineHolder;
import org.slf4j.*;

/**
 * @author peng-yongsheng
 */
public class RecordPersistentWorker extends PersistenceWorker<Record, NonMergeDataCache<Record>> {
public class RecordPersistentWorker extends AbstractWorker<Record> {

    private static final Logger logger = LoggerFactory.getLogger(RecordPersistentWorker.class);

    private final Model model;
    private final NonMergeDataCache<Record> nonMergeDataCache;
    private final IRecordDAO recordDAO;
    private final DataCarrier<Record> dataCarrier;
    private final IBatchDAO batchDAO;

    RecordPersistentWorker(ModuleDefineHolder moduleDefineHolder, Model model, int batchSize,
        IRecordDAO recordDAO) {
        super(moduleDefineHolder, batchSize);
    RecordPersistentWorker(ModuleDefineHolder moduleDefineHolder, Model model, IRecordDAO recordDAO) {
        super(moduleDefineHolder);
        this.model = model;
        this.nonMergeDataCache = new NonMergeDataCache<>();
        this.recordDAO = recordDAO;

        String name = "RECORD_PERSISTENT";
        BulkConsumePool.Creator creator = new BulkConsumePool.Creator(name, 1, 20);
        try {
            ConsumerPoolFactory.INSTANCE.createIfAbsent(name, creator);
        } catch (Exception e) {
            throw new UnexpectedException(e.getMessage(), e);
        }

        this.dataCarrier = new DataCarrier<>(1, 10000);
        this.dataCarrier.consume(ConsumerPoolFactory.INSTANCE.get(name), new RecordPersistentWorker.PersistentConsumer(this));
        this.batchDAO = moduleDefineHolder.find(StorageModule.NAME).provider().getService(IBatchDAO.class);
    }

    @Override public void in(Record record) {
        dataCarrier.produce(record);
    }

    @Override public NonMergeDataCache<Record> getCache() {
        return nonMergeDataCache;
    }

    @Override public List<Object> prepareBatch(NonMergeDataCache<Record> cache) {
        List<Object> batchCollection = new LinkedList<>();
        cache.getLast().collection().forEach(record -> {
        try {
                batchCollection.add(recordDAO.prepareBatchInsert(model, record));
            } catch (Throwable t) {
                logger.error(t.getMessage(), t);
            }
        });
        return batchCollection;
    }

    @Override public void cacheData(Record input) {
        nonMergeDataCache.writing();
        nonMergeDataCache.add(input);
        nonMergeDataCache.finishWriting();
    }

    private class PersistentConsumer implements IConsumer<Record> {

        private final RecordPersistentWorker persistent;

        private PersistentConsumer(RecordPersistentWorker persistent) {
            this.persistent = persistent;
        }

        @Override public void init() {

        }

        @Override public void consume(List<Record> data) {
            for (Record record : data) {
                persistent.onWork(record);
            }
        }

        @Override public void onError(List<Record> data, Throwable t) {
            logger.error(t.getMessage(), t);
        }

        @Override public void onExit() {
            InsertRequest insertRequest = recordDAO.prepareBatchInsert(model, record);
            batchDAO.asynchronous(insertRequest);
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        }
    }
}
+1 −5
Original line number Diff line number Diff line
@@ -19,7 +19,6 @@
package org.apache.skywalking.oap.server.core.analysis.worker;

import java.util.*;
import lombok.Getter;
import org.apache.skywalking.oap.server.core.*;
import org.apache.skywalking.oap.server.core.analysis.*;
import org.apache.skywalking.oap.server.core.analysis.record.Record;
@@ -48,8 +47,6 @@ public class RecordStreamProcessor implements StreamProcessor<Record> {
        }
    }

    @Getter private List<RecordPersistentWorker> persistentWorkers = new ArrayList<>();

    @SuppressWarnings("unchecked")
    public void create(ModuleDefineHolder moduleDefineHolder, Stream stream, Class<? extends Record> recordClass) {
        if (DisableRegister.INSTANCE.include(stream.name())) {
@@ -66,9 +63,8 @@ public class RecordStreamProcessor implements StreamProcessor<Record> {

        IModelSetter modelSetter = moduleDefineHolder.find(CoreModule.NAME).provider().getService(IModelSetter.class);
        Model model = modelSetter.putIfAbsent(recordClass, stream.scopeId(), new Storage(stream.name(), true, true, Downsampling.Second), true);
        RecordPersistentWorker persistentWorker = new RecordPersistentWorker(moduleDefineHolder, model, 4000, recordDAO);
        RecordPersistentWorker persistentWorker = new RecordPersistentWorker(moduleDefineHolder, model, recordDAO);

        persistentWorkers.add(persistentWorker);
        workers.put(recordClass, persistentWorker);
    }
}
Loading