package com.ctgu.flink.project;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;
import java.util.Random;
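
// Hourly PV (page view) count over the UserBehavior CSV: Flink SQL handles ingestion and
// event-time/watermark setup, and the DataStream API does the windowed counting.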
public class Flink_Sql_Pv {
    public static void main(String[] args) throws Exception {
        long start = System.currentTimeMillis();
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(8);
        EnvironmentSettings settings = EnvironmentSettings
                .newInstance()
                .inStreamingMode()
                .build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);
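        // Register a filesystem CSV source table backed by data/UserBehavior.csv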
        String createSql =
                "CREATE TABLE source " +
                " (" +
                " `userId` BIGINT," +
                " `itemId` BIGINT," +
                " `categoryId` INT," +
                " `behavior` STRING," +
                " `ts` BIGINT" +
                " )" +
                " WITH (" +
                " 'connector'='filesystem'," +
                " 'format'='csv'," +
                " 'csv.field-delimiter'=','," +
                " 'path'='data/UserBehavior.csv'" +
                " )";
        tableEnv.executeSql(createSql);
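        // Keep only 'pv' rows and convert the epoch-second ts to an epoch-millisecond `timestamp`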
        String userBehavior = "select *, ts * 1000 as `timestamp` from source where behavior = 'pv'";
        Table userBehaviorTable = tableEnv.sqlQuery(userBehavior);
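        // Round-trip through a DataStream so an event-time column and watermark can be attached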
        DataStream<Row> rowDataStream = tableEnv.toDataStream(userBehaviorTable);
        Table source =
                tableEnv.fromDataStream(
                        rowDataStream,
                        Schema.newBuilder()
                                .columnByExpression("time_ltz", "TO_TIMESTAMP_LTZ(`timestamp`, 3)")
                                .watermark("time_ltz", "time_ltz - INTERVAL '5' SECOND")
                                .build());
        tableEnv.createTemporaryView("userBehavior", source);
        DataStream<Row> dataStream = tableEnv.toDataStream(source);
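        // Spread records over 10 random keys, count them per hourly tumbling window,
        // then sum the per-key partial counts for each window end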
        DataStream<Tuple2<Long, Long>> sum = dataStream
                .filter(data -> "pv".equals(data.getField("behavior")))
                .map(new MyMapFunction())
                .keyBy(data -> data.f0)
                .window(TumblingEventTimeWindows.of(Time.hours(1)))
                .aggregate(new AverageAggregate(), new MyWindowFunction())
                .keyBy(data -> data.f0)
                .process(new MyProcessFunction());
        sum.print();
        env.execute("Table SQL");
        System.out.println("Elapsed time (s): " + (System.currentTimeMillis() - start) / 1000);
    }
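    // Assigns each record a random key in [0, 10) with a count of 1, so the
    // windowed pre-aggregation is spread across parallel subtasks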
    private static class MyMapFunction
            extends RichMapFunction<Row, Tuple2<Integer, Long>> {

        private final Random random = new Random();

        @Override
        public Tuple2<Integer, Long> map(Row row) throws Exception {
            return new Tuple2<>(random.nextInt(10), 1L);
        }
    }
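    // Per-window count: the accumulator is the number of elements seen in the pane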
    private static class AverageAggregate
            implements AggregateFunction<Tuple2<Integer, Long>, Long, Long> {

        @Override
        public Long createAccumulator() {
            return 0L;
        }

        @Override
        public Long add(Tuple2<Integer, Long> value, Long accumulator) {
            return accumulator + value.f1;
        }

        @Override
        public Long getResult(Long accumulator) {
            return accumulator;
        }

        @Override
        public Long merge(Long a, Long b) {
            return a + b;
        }
    }
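    // Wraps the per-pane count with its window end: emits (windowEnd, count)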
    private static class MyWindowFunction
            implements WindowFunction<Long, Tuple2<Long, Long>, Integer, TimeWindow> {

        @Override
        public void apply(Integer integer,
                          TimeWindow timeWindow,
                          Iterable<Long> iterable,
                          Collector<Tuple2<Long, Long>> collector) throws Exception {
            collector.collect(new Tuple2<>(timeWindow.getEnd(), iterable.iterator().next()));
        }
    }
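    // Keyed by window end: sums the partial counts from all random keys, emits the
    // total once the watermark passes windowEnd + 1, then clears the state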
    private static class MyProcessFunction
            extends KeyedProcessFunction<Long, Tuple2<Long, Long>, Tuple2<Long, Long>> {

        private ValueState<Long> totalCountState;

        @Override
        public void open(Configuration parameters) throws Exception {
            totalCountState = getRuntimeContext().getState(new ValueStateDescriptor<>(
                    "total-count", Long.class, 0L));
        }

        @Override
        public void processElement(Tuple2<Long, Long> tuple2,
                                   Context context,
                                   Collector<Tuple2<Long, Long>> collector) throws Exception {
            totalCountState.update(totalCountState.value() + tuple2.f1);
            context.timerService().registerEventTimeTimer(tuple2.f0 + 1);
        }

        @Override
        public void onTimer(long timestamp, OnTimerContext ctx, Collector<Tuple2<Long, Long>> out) throws Exception {
            Long totalCount = totalCountState.value();
            out.collect(new Tuple2<>(ctx.getCurrentKey(), totalCount));
            totalCountState.clear();
        }
    }
}