Commit 1c773c4e862c9ff3bcdf20156b61f9a94f8b1a42

Authored by yiming
1 parent 78137fe0

修改的定时器时间

src/main/java/com/example/demo/SaticScheduleTask.java
@@ -56,6 +56,8 @@ public class SaticScheduleTask implements InitializingBean { @@ -56,6 +56,8 @@ public class SaticScheduleTask implements InitializingBean {
56 List<Map<String,List<Scheduling>>> result=contrast.getSchedulingS(now,1); 56 List<Map<String,List<Scheduling>>> result=contrast.getSchedulingS(now,1);
57 contrast.getSchedulingS=result.get(0); 57 contrast.getSchedulingS=result.get(0);
58 contrast.getSchedulingSSell=result.get(1); 58 contrast.getSchedulingSSell=result.get(1);
  59 + logger.info("===============================数据存放结束================================");
  60 + logger.info("===============================开始读取文件================================");
59 WatchKey watchKey = null; 61 WatchKey watchKey = null;
60 List<File> files = new ArrayList<>(); 62 List<File> files = new ArrayList<>();
61 while ((watchKey = watchService.poll()) != null) { 63 while ((watchKey = watchService.poll()) != null) {
@@ -69,6 +71,7 @@ public class SaticScheduleTask implements InitializingBean { @@ -69,6 +71,7 @@ public class SaticScheduleTask implements InitializingBean {
69 watchKey.reset(); 71 watchKey.reset();
70 } 72 }
71 System.out.println("文件数"+files.size()); 73 System.out.println("文件数"+files.size());
  74 + logger.info("===============================读取文件结束================================");
72 //先把处理上一次没匹配到的 75 //先把处理上一次没匹配到的
73 if(!contrast.oldData.isEmpty()){ 76 if(!contrast.oldData.isEmpty()){
74 contrast.filterCardList(contrast.oldData,true); 77 contrast.filterCardList(contrast.oldData,true);
src/main/java/com/example/demo/service/Contrast.java
@@ -180,8 +180,8 @@ public class Contrast { @@ -180,8 +180,8 @@ public class Contrast {
180 List<Map<String,List<Scheduling>>> result=new ArrayList<>(); 180 List<Map<String,List<Scheduling>>> result=new ArrayList<>();
181 result.add(getSchedulingS); 181 result.add(getSchedulingS);
182 result.add(getSchedulingSSell); 182 result.add(getSchedulingSSell);
183 -  
184 - logger.warn("======匹配"+arrivalInfoList.size()+"条======匹配成功 "+b+"条=====耗时=="+(System.currentTimeMillis() - start)/1000 + "秒"); 183 + logger.info("======匹配"+arrivalInfoList.size()+"条======匹配成功 "+b+"条=====耗时=="+(System.currentTimeMillis() - start)/1000 + "秒");
  184 + logger.info("===============================预加载数据匹配结束================================");
185 return result; 185 return result;
186 } 186 }
187 187
@@ -193,6 +193,7 @@ public class Contrast { @@ -193,6 +193,7 @@ public class Contrast {
193 * @Date: 2021/10/13 193 * @Date: 2021/10/13
194 */ 194 */
195 public void filterCardList(List<TJRLDB> allData, Boolean isOld) { 195 public void filterCardList(List<TJRLDB> allData, Boolean isOld) {
  196 + logger.info("===============================票务数据匹配开始================================");
196 long start = System.currentTimeMillis(); 197 long start = System.currentTimeMillis();
197 List<Map> driverCardList = getDriverCardList();//获取司机信息 198 List<Map> driverCardList = getDriverCardList();//获取司机信息
198 Map<String,List<Scheduling>> schedulingS =getSchedulingS; 199 Map<String,List<Scheduling>> schedulingS =getSchedulingS;
@@ -332,6 +333,7 @@ public class Contrast { @@ -332,6 +333,7 @@ public class Contrast {
332 oldData.clear(); 333 oldData.clear();
333 } 334 }
334 logger.warn("======匹配"+size+"条======匹配到"+b+"条=====耗时=="+(System.currentTimeMillis() - start)/1000 + "秒"); 335 logger.warn("======匹配"+size+"条======匹配到"+b+"条=====耗时=="+(System.currentTimeMillis() - start)/1000 + "秒");
  336 + logger.info("===============================票务数据匹配结束================================");
335 } 337 }
336 338
337 339
src/main/resources/application.properties
@@ -34,10 +34,7 @@ spring.kafka.producer.acks=all @@ -34,10 +34,7 @@ spring.kafka.producer.acks=all
34 spring.kafka.producer.properties.max.request.size=20971520 34 spring.kafka.producer.properties.max.request.size=20971520
35 35
36 36
37 -  
38 -logging.path=/home/demo/log  
39 -  
40 - 37 +#
41 #spring.datasource.db1.jdbc-url=jdbc:mysql://192.168.168.240:3306/pd_control?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC 38 #spring.datasource.db1.jdbc-url=jdbc:mysql://192.168.168.240:3306/pd_control?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC
42 #spring.datasource.db1.username=root 39 #spring.datasource.db1.username=root
43 #spring.datasource.db1.password=root2jsp 40 #spring.datasource.db1.password=root2jsp
@@ -70,4 +67,5 @@ logging.path=/home/demo/log @@ -70,4 +67,5 @@ logging.path=/home/demo/log
70 #spring.kafka.producer.properties.max.request.size=10485760 67 #spring.kafka.producer.properties.max.request.size=10485760
71 68
72 69
73 -#logging.path=E:/log  
74 \ No newline at end of file 70 \ No newline at end of file
  71 +logging.path=./logs
  72 +logging.file=Log
75 \ No newline at end of file 73 \ No newline at end of file
src/main/resources/logback.xml 0 → 100644
  1 +<?xml version="1.0" encoding="utf-8"?>
  2 +<configuration>
  3 + <appender name="CONSOLE-LOG" class="ch.qos.logback.core.ConsoleAppender">
  4 + <layout class="ch.qos.logback.classic.PatternLayout">
  5 + <pattern>[%d{yyyy-MM-dd' 'HH:mm:ss.SSS}] [%C] [%t] [%L] [%-5p] %m%n</pattern>
  6 + </layout>
  7 + </appender>
  8 +
  9 + <!--获取比info级别高(包括info级别)但除error级别的日志-->
  10 + <appender name="INFO-LOG" class="ch.qos.logback.core.rolling.RollingFileAppender">
  11 + <filter class="ch.qos.logback.classic.filter.LevelFilter">
  12 + <level>ERROR</level>
  13 + <onMatch>DENY</onMatch>
  14 + <onMismatch>ACCEPT</onMismatch>
  15 + </filter>
  16 + <encoder>
  17 + <pattern>[%d{yyyy-MM-dd' 'HH:mm:ss.SSS}] [%C] [%t] [%L] [%-5p] %m%n</pattern>
  18 + </encoder>
  19 +
  20 + <!--滚动策略-->
  21 + <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
  22 + <!--路径-->
  23 + <fileNamePattern>${LOG_INFO_HOME:-./logs/info}/%d.log</fileNamePattern>
  24 + <maxHistory>30</maxHistory>
  25 + </rollingPolicy>
  26 + </appender>
  27 + <appender name="ERROR-LOG" class="ch.qos.logback.core.rolling.RollingFileAppender">
  28 + <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
  29 + <level>ERROR</level>
  30 + </filter>
  31 + <encoder>
  32 + <pattern>[%d{yyyy-MM-dd' 'HH:mm:ss.SSS}] [%C] [%t] [%L] [%-5p] %m%n</pattern>
  33 + </encoder>
  34 + <!--滚动策略-->
  35 + <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
  36 + <!--路径-->
  37 + <fileNamePattern>${LOG_ERROR_HOME:-./logs/error}/%d.log</fileNamePattern>
  38 + <maxHistory>30</maxHistory>
  39 + </rollingPolicy>
  40 + </appender>
  41 +
  42 + <appender name="ASYNC-INFO" class="ch.qos.logback.classic.AsyncAppender">
  43 + <!-- 不丢失日志.默认的,如果队列的80%已满,则会丢弃TRACE、DEBUG、INFO级别的日志 -->
  44 + <discardingThreshold>0</discardingThreshold>
  45 + <!-- 更改默认的队列的深度,该值会影响性能.默认值为256 -->
  46 + <queueSize>256</queueSize>
  47 + <!-- 添加附加的appender,最多只能添加一个 -->
  48 + <appender-ref ref="INFO-LOG"/>
  49 + </appender>
  50 +
  51 + <appender name="ASYNC-ERROR" class="ch.qos.logback.classic.AsyncAppender">
  52 + <!-- 不丢失日志.默认的,如果队列的80%已满,则会丢弃TRACE、DEBUG、INFO级别的日志 -->
  53 + <discardingThreshold>0</discardingThreshold>
  54 + <!-- 更改默认的队列的深度,该值会影响性能.默认值为256 -->
  55 + <queueSize>256</queueSize>
  56 + <!-- 添加附加的appender,最多只能添加一个 -->
  57 + <appender-ref ref="ERROR-LOG"/>
  58 + </appender>
  59 +
  60 + <root level="info">
  61 + <appender-ref ref="CONSOLE-LOG" />
  62 + <appender-ref ref="ASYNC-INFO" />
  63 + <appender-ref ref="ASYNC-ERROR" />
  64 + </root>
  65 +</configuration>