Flink-5. Flink 隨機 key 緩解數(shù)據(jù)傾斜(two-phase aggregation with random keys)

package com.ctgu.flink.project;

import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Schema;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;

import java.util.Random;


/**
 * Hourly page-view (pv) count over the UserBehavior CSV, using random keys to
 * mitigate data skew.
 *
 * <p>Pipeline:
 * <ol>
 *   <li>A filesystem CSV source is filtered to 'pv' rows via SQL, and an
 *       event-time attribute with a 5s watermark is attached.</li>
 *   <li>Each row is mapped to (randomKey in [0,10), 1L) so the windowed
 *       pre-aggregation is spread over 10 keys instead of one hot key.</li>
 *   <li>A 1-hour tumbling event-time window counts per random key and emits
 *       (windowEnd, partialCount).</li>
 *   <li>A second keyBy on windowEnd merges the partial counts into the final
 *       per-window total using keyed state plus an event-time timer.</li>
 * </ol>
 */
public class Flink_Sql_Pv {

    public static void main(String[] args) throws Exception {

        long start = System.currentTimeMillis();

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(8);

        EnvironmentSettings settings = EnvironmentSettings
                .newInstance()
                .inStreamingMode()
                .build();

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // Filesystem CSV source over the raw user-behavior file.
        String createSql =
                "CREATE TABLE source " +
                        "    (" +
                        "    `userId` BIGINT," +
                        "    `itemId` BIGINT," +
                        "    `categoryId` INT," +
                        "    `behavior` STRING," +
                        "    `ts` BIGINT" +
                        "    )" +
                        "    WITH (" +
                        "       'connector'='filesystem'," +
                        "       'format'='csv'," +
                        "       'csv.field-delimiter'=','," +
                        "       'path'='data/UserBehavior.csv'" +
                        "    )";

        tableEnv.executeSql(createSql);

        // Keep only page views; `ts` is seconds, so scale to milliseconds
        // for TO_TIMESTAMP_LTZ below.
        String userBehavior = "select *, ts * 1000 as `timestamp` from source where behavior = 'pv'";

        Table userBehaviorTable = tableEnv.sqlQuery(userBehavior);

        DataStream<Row> rowDataStream = tableEnv.toDataStream(userBehaviorTable);

        // Re-attach the stream as a table with an event-time attribute and a
        // 5-second out-of-orderness watermark.
        Table source =
                tableEnv.fromDataStream(
                        rowDataStream,
                        Schema.newBuilder()
                                .columnByExpression("time_ltz", "TO_TIMESTAMP_LTZ(`timestamp`, 3)")
                                .watermark("time_ltz", "time_ltz - INTERVAL '5' SECOND")
                                .build());

        tableEnv.createTemporaryView("userBehavior", source);

        DataStream<Row> dataStream = tableEnv.toDataStream(source);

        // Phase 1: scatter onto 10 random keys and pre-aggregate per window.
        // Phase 2: gather the partial counts by window end and emit the total.
        // (The 'pv' filter is already applied in SQL; the filter here is a
        // harmless safety net.)
        DataStream<Tuple2<Long, Long>> sum = dataStream.filter(data -> "pv".equals(data.getField("behavior")))
                .map(new MyMapFunction())
                .keyBy(data -> data.f0)
                .window(TumblingEventTimeWindows.of(Time.hours(1)))
                .aggregate(new AverageAggregate(), new MyWindowFunction())
                .keyBy(data -> data.f0)
                .process(new MyProcessFunction());

        sum.print();

        env.execute("Table SQL");

        System.out.println("耗時: " + (System.currentTimeMillis() - start) / 1000);
    }

    /**
     * Phase-1 key assignment: maps every row to (randomKey, 1L), where
     * randomKey is uniform in [0, 10). This spreads the single hot key over
     * up to 10 parallel window instances.
     */
    private static class MyMapFunction
            extends RichMapFunction<Row, Tuple2<Integer, Long>> {

        // One Random per task instance. The original allocated a new Random
        // for every record, which is wasteful and can correlate seeds when
        // many instances are created in the same millisecond.
        private transient Random random;

        @Override
        public void open(Configuration parameters) throws Exception {
            random = new Random();
        }

        @Override
        public Tuple2<Integer, Long> map(Row row) throws Exception {
            return new Tuple2<>(random.nextInt(10), 1L);
        }
    }

    /**
     * Pre-aggregation: running count of elements per (randomKey, window).
     * Accumulator and result are both the count.
     */
    private static class AverageAggregate
            implements AggregateFunction<Tuple2<Integer, Long>, Long, Long> {

        @Override
        public Long createAccumulator() {
            return 0L;
        }

        @Override
        public Long add(Tuple2<Integer, Long> value, Long accumulator) {
            // Add the element's own count instead of a hard-coded +1. The
            // map stage currently always emits 1L, so results are unchanged,
            // but the function no longer silently ignores its input.
            return accumulator + value.f1;
        }

        @Override
        public Long getResult(Long accumulator) {
            return accumulator;
        }

        @Override
        public Long merge(Long a, Long b) {
            return a + b;
        }
    }

    /**
     * Re-keys each partial count by its window end so phase 2 can merge the
     * partial counts of all random keys belonging to the same window.
     */
    private static class MyWindowFunction
            implements WindowFunction<Long, Tuple2<Long, Long>, Integer, TimeWindow> {

        @Override
        public void apply(Integer integer,
                          TimeWindow timeWindow,
                          Iterable<Long> iterable,
                          Collector<Tuple2<Long, Long>> collector) throws Exception {
            // The pre-aggregated window result is a single element.
            collector.collect(new Tuple2<>(timeWindow.getEnd(), iterable.iterator().next()));
        }
    }

    /**
     * Phase-2 merge: keyed by window end, accumulates the partial counts in
     * keyed state and fires an event-time timer at windowEnd + 1 (i.e. once
     * the watermark has passed the window) to emit the final
     * (windowEnd, totalCount) and clear the state.
     */
    private static class MyProcessFunction
            extends KeyedProcessFunction<Long, Tuple2<Long, Long>, Tuple2<Long, Long>> {

        // Running total of partial counts for the current window-end key.
        private ValueState<Long> totalCountState;

        @Override
        public void open(Configuration parameters) throws Exception {
            totalCountState = getRuntimeContext().getState(new ValueStateDescriptor<>(
                    "total-count", Long.class, 0L));
        }

        @Override
        public void processElement(Tuple2<Long, Long> tuple2, Context context, Collector<Tuple2<Long, Long>> collector) throws Exception {
            totalCountState.update(totalCountState.value() + tuple2.f1);
            // Registering the same timestamp repeatedly is deduplicated by
            // Flink, so the timer fires exactly once per window.
            context.timerService().registerEventTimeTimer(tuple2.f0 + 1);
        }

        @Override
        public void onTimer(long timestamp, OnTimerContext ctx, Collector<Tuple2<Long, Long>> out) throws Exception {
            Long totalCount = totalCountState.value();
            out.collect(new Tuple2<>(ctx.getCurrentKey(), totalCount));
            totalCountState.clear();
        }
    }

}

最后編輯于
?著作權歸作者所有,轉載或內容合作請聯(lián)系作者
  • 序言:七十年代末,一起剝皮案震驚了整個濱河市丧蘸,隨后出現(xiàn)的幾起案子侈询,更是在濱河造成了極大的恐慌,老刑警劉巖剿干,帶你破解...
    沈念sama閱讀 218,451評論 6 506
  • 序言:濱河連續(xù)發(fā)生了三起死亡事件,死亡現(xiàn)場離奇詭異,居然都是意外死亡,警方通過查閱死者的電腦和手機流炕,發(fā)現(xiàn)死者居然都...
    沈念sama閱讀 93,172評論 3 394
  • 文/潘曉璐 我一進店門,熙熙樓的掌柜王于貴愁眉苦臉地迎上來仅胞,“玉大人每辟,你說我怎么就攤上這事「删桑” “怎么了渠欺?”我有些...
    開封第一講書人閱讀 164,782評論 0 354
  • 文/不壞的土叔 我叫張陵,是天一觀的道長椎眯。 經(jīng)常有香客問我挠将,道長胳岂,這世上最難降的妖魔是什么? 我笑而不...
    開封第一講書人閱讀 58,709評論 1 294
  • 正文 為了忘掉前任舔稀,我火速辦了婚禮乳丰,結果婚禮上,老公的妹妹穿的比我還像新娘内贮。我一直安慰自己产园,他們只是感情好,可當我...
    茶點故事閱讀 67,733評論 6 392
  • 文/花漫 我一把揭開白布贺归。 她就那樣靜靜地躺著淆两,像睡著了一般。 火紅的嫁衣襯著肌膚如雪拂酣。 梳的紋絲不亂的頭發(fā)上秋冰,一...
    開封第一講書人閱讀 51,578評論 1 305
  • 那天,我揣著相機與錄音婶熬,去河邊找鬼剑勾。 笑死,一個胖子當著我的面吹牛赵颅,可吹牛的內容都是我干的虽另。 我是一名探鬼主播,決...
    沈念sama閱讀 40,320評論 3 418
  • 文/蒼蘭香墨 我猛地睜開眼饺谬,長吁一口氣:“原來是場噩夢啊……” “哼捂刺!你這毒婦竟也來了?” 一聲冷哼從身側響起募寨,我...
    開封第一講書人閱讀 39,241評論 0 276
  • 序言:老撾萬榮一對情侶失蹤族展,失蹤者是張志新(化名)和其女友劉穎,沒想到半個月后拔鹰,有當?shù)厝嗽跇淞掷锇l(fā)現(xiàn)了一具尸體仪缸,經(jīng)...
    沈念sama閱讀 45,686評論 1 314
  • 正文 獨居荒郊野嶺守林人離奇死亡,尸身上長有42處帶血的膿包…… 初始之章·張勛 以下內容為張勛視角 年9月15日...
    茶點故事閱讀 37,878評論 3 336
  • 正文 我和宋清朗相戀三年列肢,在試婚紗的時候發(fā)現(xiàn)自己被綠了恰画。 大學時的朋友給我發(fā)了我未婚夫和他白月光在一起吃飯的照片。...
    茶點故事閱讀 39,992評論 1 348
  • 序言:一個原本活蹦亂跳的男人離奇死亡瓷马,死狀恐怖拴还,靈堂內的尸體忽然破棺而出,到底是詐尸還是另有隱情欧聘,我是刑警寧澤片林,帶...
    沈念sama閱讀 35,715評論 5 346
  • 正文 年R本政府宣布,位于F島的核電站,受9級特大地震影響拇厢,放射性物質發(fā)生泄漏爱谁。R本人自食惡果不足惜,卻給世界環(huán)境...
    茶點故事閱讀 41,336評論 3 330
  • 文/蒙蒙 一孝偎、第九天 我趴在偏房一處隱蔽的房頂上張望代箭。 院中可真熱鬧誉己,春花似錦拧簸、人聲如沸组去。這莊子的主人今日做“春日...
    開封第一講書人閱讀 31,912評論 0 22
  • 文/蒼蘭香墨 我抬頭看了看天上的太陽阻塑。三九已至,卻和暖如春果复,著一層夾襖步出監(jiān)牢的瞬間陈莽,已是汗流浹背。 一陣腳步聲響...
    開封第一講書人閱讀 33,040評論 1 270
  • 我被黑心中介騙來泰國打工虽抄, 沒想到剛下飛機就差點兒被人妖公主榨干…… 1. 我叫王不留走搁,地道東北人。 一個月前我還...
    沈念sama閱讀 48,173評論 3 370
  • 正文 我出身青樓迈窟,卻偏偏與公主長得像私植,于是被迫代替她去往敵國和親。 傳聞我的和親對象是個殘疾皇子车酣,可洞房花燭夜當晚...
    茶點故事閱讀 44,947評論 2 355

推薦閱讀更多精彩內容