commit 4eadf5ed04
parent 60729e2883
madefu
2019-05-07 16:10:45 +08:00
4 changed files with 41 additions and 1 deletion


@@ -31,4 +31,4 @@ y_pred = regressor.predict(X_test)
 # regression evaluation
 from sklearn.metrics import r2_score
-print(r2_score(Y_test,y_pred))
+print(r2_score(Y_test, y_pred))

Code/KafkaProducer.py (new file, 13 lines)

@@ -0,0 +1,13 @@
#!/usr/bin/python
from kafka import KafkaProducer

# Kafka brokers for the test cluster
kafkaHosts = ["kafka01.paas.longfor.sit:9092",
              "kafka02.paas.longfor.sit:9092",
              "kafka03.paas.longfor.sit:9092"]
producer = KafkaProducer(bootstrap_servers=kafkaHosts)

# Send 20 test messages to the topic, then flush to force delivery
for _ in range(20):
    producer.send("testapplog_plm-prototype", b"Hello....")
producer.flush()
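
Not part of the commit: a minimal sketch of how delivery of these test messages could be verified synchronously. In kafka-python, send() returns a future, and calling get() on it blocks until the broker acknowledges the record (or raises on failure). Broker addresses and the topic are taken from KafkaProducer.py above; the 10-second timeout is an arbitrary choice.

from kafka import KafkaProducer
from kafka.errors import KafkaError

kafkaHosts = ["kafka01.paas.longfor.sit:9092",
              "kafka02.paas.longfor.sit:9092",
              "kafka03.paas.longfor.sit:9092"]
producer = KafkaProducer(bootstrap_servers=kafkaHosts)

# send() returns a future; get() blocks until the broker acknowledges the record
future = producer.send("testapplog_plm-prototype", b"Hello....")
try:
    metadata = future.get(timeout=10)   # timeout in seconds; arbitrary value
    print(metadata.topic, metadata.partition, metadata.offset)
except KafkaError as err:
    print("delivery failed:", err)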

Code/TestKafka.py (new file, 24 lines)

@@ -0,0 +1,24 @@
#!/usr/bin/python
from kafka import KafkaConsumer

# Kafka brokers for the test cluster
kafkaHosts = ["kafka01.paas.longfor.sit:9092",
              "kafka02.paas.longfor.sit:9092",
              "kafka03.paas.longfor.sit:9092"]

'''
auto_offset_reset options:
earliest:  if a partition has a committed offset, consume from it;
           otherwise consume from the beginning of the partition.
latest:    if a partition has a committed offset, consume from it;
           otherwise consume only data newly produced to the partition.
none:      if every partition has a committed offset, consume from those offsets;
           if any partition has no committed offset, raise an exception.
'''
consumer = KafkaConsumer(bootstrap_servers=kafkaHosts,
                         group_id='mdf_group',
                         auto_offset_reset='latest')
consumer.subscribe(["testapplog_plm-prototype"])

# Block and print every message received from the topic
for msg in consumer:
    print(msg.value)
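
The docstring in TestKafka.py above describes the three auto_offset_reset policies. As an illustration only (not part of the commit), the sketch below uses the 'earliest' policy with auto-commit disabled, so a consumer group with no committed offsets replays the topic from the beginning and commits offsets only after processing. Broker addresses and the topic are reused from above; the group id 'mdf_replay_group' and the ten-message limit are made-up values.

from kafka import KafkaConsumer

kafkaHosts = ["kafka01.paas.longfor.sit:9092",
              "kafka02.paas.longfor.sit:9092",
              "kafka03.paas.longfor.sit:9092"]

# 'earliest' + no committed offsets for this (hypothetical) group: start from the
# beginning of each partition; offsets are committed manually after processing
consumer = KafkaConsumer(bootstrap_servers=kafkaHosts,
                         group_id='mdf_replay_group',   # hypothetical group id
                         auto_offset_reset='earliest',
                         enable_auto_commit=False)
consumer.subscribe(["testapplog_plm-prototype"])

for i, msg in enumerate(consumer):
    print(msg.offset, msg.value)
    consumer.commit()        # synchronously commit the consumed offsets
    if i >= 9:               # stop after 10 messages; arbitrary limit for the sketch
        break
consumer.close()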


@@ -24,3 +24,6 @@ Hours,Scores
 3.8,35
 6.9,76
 7.8,86
+2.1,93
+2.2,93
+2.5,93