wangzhun 2 years ago
parent
commit
b8946fd75a
15 changed files with 69 additions and 20 deletions
  1. 1 1
      sxgk-projet/sxgk-dataImport/dataImport-boot/pom.xml
  2. 10 0
      sxgk-projet/sxgk-dataImport/dataImport-boot/readme.txt
  3. 8 0
      sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/controller/DataImportController.java
  4. 2 0
      sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/entity/Psnode.java
  5. 2 0
      sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/entity/Pspipe.java
  6. 3 0
      sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/entity/hb/FileSource.java
  7. 3 2
      sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/entity/hb/Node.java
  8. 3 2
      sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/entity/hb/Pipe.java
  9. 2 0
      sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/service/DataMergeService.java
  10. 23 6
      sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/service/impl/DataMergeServiceImpl.java
  11. 3 0
      sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/task/DataPullTask.java
  12. 1 1
      sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/resources/mapper/FileSourceMapper.xml
  13. 1 1
      sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/resources/mapper/PsnodeMapper.xml
  14. 1 1
      sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/resources/mapper/PspipeMapper.xml
  15. 6 6
      sxgk-projet/sxgk/src/main/resources/customMapper/CustomPersonWorkBenchMapper.xml

+ 1 - 1
sxgk-projet/sxgk-dataImport/dataImport-boot/pom.xml

@@ -35,7 +35,7 @@
             <version>0.4.2</version>
             <scope>compile</scope>
         </dependency>
-        <dependency>
+      <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
             <scope>test</scope>

+ 10 - 0
sxgk-projet/sxgk-dataImport/dataImport-boot/readme.txt

@@ -0,0 +1,10 @@
+该项目用于拉取洪波数据并且进行相关的数据合并
+
+通过洪波管点管线 更新成果表中的数据
+
+update tf_ywpn_pspipe_w a set a.hb_id =(select  hb_id from tf_hb_pipe b
+ where ( b.work_area = a.DESIGN_GONGHAO and a.in_juneid = b.start_bsm  and a.out_juneid = b.end_bsm )   )
+
+
+
+update tf_ywpn_psnode_w a set hb_id=(select hb_id from tf_hb_node b where a.design_gonghao = b.work_area and  a.pipe_id = b.cad_code )

+ 8 - 0
sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/controller/DataImportController.java

@@ -579,6 +579,14 @@ public class DataImportController {
 		dataMergeService.pictureDownload();
 		return  ResultRespone.success();
 	}
+
+	@ApiOperation(value = "图片临时下载")
+	@PostMapping("/pictureDownloadTemp")
+	public ResultRespone pictureDownloadTemp(String secret) {
+		Assert.isTrue("tofly!2021@init".equals(secret),"初始化密码错误,请联系开发人员");
+		dataMergeService.pictureDownload();
+		return  ResultRespone.success();
+	}
 	private String getPageQuery(Object currentPage,int pageSize,String updateTime){
 
 

+ 2 - 0
sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/entity/Psnode.java

@@ -445,4 +445,6 @@ public class Psnode extends Model<Psnode> {
     private Long prjId;
     @TableLogic
     private String isPublish;
+
+    private  String hbId;
 }

+ 2 - 0
sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/entity/Pspipe.java

@@ -618,5 +618,7 @@ public class Pspipe extends Model<Pspipe> {
         @JsonFormat(pattern="yyyy-MM-dd HH:mm:ss",timezone = "GMT+8")
     @DateTimeFormat(pattern="yyyy-MM-dd HH:mm:ss")
                 private Date acceptanceDate;
+
+    private  String hbId;
             
 }

+ 3 - 0
sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/entity/hb/FileSource.java

@@ -65,5 +65,8 @@ public class FileSource extends Model<FileSource> {
     @TableField(insertStrategy = FieldStrategy.IGNORED,updateStrategy = FieldStrategy.IGNORED)
     @ApiModelProperty(value = "上传人")
     private String uploader;
+
+    @ApiModelProperty(value = "本地文件地址")
+    private String localUrl;
     
 }

+ 3 - 2
sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/entity/hb/Node.java

@@ -8,8 +8,9 @@ import lombok.Data;
 import lombok.EqualsAndHashCode;
 
 
-
-
+/**
+ * 洪波管点数据
+ */
 @Data
 @EqualsAndHashCode(callSuper = true)
 @TableName(value="TF_HB_NODE",resultMap = "hbNodeMap")

+ 3 - 2
sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/entity/hb/Pipe.java

@@ -12,8 +12,9 @@ import org.springframework.format.annotation.DateTimeFormat;
 import java.util.Date;
 
 
-
-
+/**
+ * 洪波管线数据
+ */
 @Data
 @EqualsAndHashCode(callSuper = true)
 @TableName(value="TF_HB_PIPE",resultMap = "hbPipeMap")

+ 2 - 0
sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/service/DataMergeService.java

@@ -5,4 +5,6 @@ public interface DataMergeService {
     public  void scanDataMerge(String prjId);
 
     public void pictureDownload();
+
+    public void pictureDownLoadTemp();
 }

+ 23 - 6
sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/service/impl/DataMergeServiceImpl.java

@@ -15,6 +15,7 @@ import com.tofly.dataImport.mapper.FileSourceMapper;
 import com.tofly.dataImport.mapper.ScanMapper;
 import com.tofly.dataImport.service.*;
 import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
 import org.springframework.data.redis.core.ValueOperations;
 import org.springframework.http.HttpMethod;
 import org.springframework.http.MediaType;
@@ -29,6 +30,7 @@ import java.util.*;
 
 @Service
 @AllArgsConstructor
+@Slf4j
 public class DataMergeServiceImpl implements DataMergeService {
   private ScanMapper scanMapper;
   private TdtechService tdtechService;
@@ -46,7 +48,8 @@ public class DataMergeServiceImpl implements DataMergeService {
     private final ProcedureNewService procedureNewService;
     private final PsnodeService psnodeService;
     private  final PspipeService pspipeService;
-    private  final FileSourceMapper fileSourceMapper;
+    private final FileSourceMapper fileSourceMapper;
+    private final FileSourceService  fileSourceService;
     private final UserService userService;
     private  final NewpsnodeService newpsnodeService;
     private final NewpspipeService newpspipeService;
@@ -57,6 +60,7 @@ public class DataMergeServiceImpl implements DataMergeService {
 
     private final  ScanService scanService;
 
+
     @Resource(name = "sxgkRestTemplate")
     private RestTemplate restTemplate;
 
@@ -73,13 +77,13 @@ public class DataMergeServiceImpl implements DataMergeService {
             page.setSize(1000);
             String conditionSql=null;
             if(StringUtils.isNotBlank(prjId)){
-                conditionSql = "select remarks from tf_ywpn_psnode_w where remarks is not null and prj_id="+prjId+"\n" +
+                conditionSql = "select hb_id from tf_ywpn_psnode_w where hb_id is not null and prj_id="+prjId+"\n" +
                         "union \n" +
                         "select remarks from tf_ywpn_pspipe_w where remarks is not null and prj_id="+prjId+"\n";
             }else{
-                conditionSql = "select remarks from tf_ywpn_psnode_w where remarks is not null\n" +
+                conditionSql = "select remarks from tf_ywpn_psnode_w where hb_id is not null\n" +
                         "union \n" +
-                        "select remarks from tf_ywpn_pspipe_w where remarks is not null \n";
+                        "select hb_id from tf_ywpn_pspipe_w where hb_id is not null \n";
             }
 
             Page<Scan> page1 = scanMapper.selectPage(page, Wrappers.<Scan>lambdaQuery()
@@ -168,6 +172,19 @@ public class DataMergeServiceImpl implements DataMergeService {
         }
     }
 
+    @Override
+    public void pictureDownLoadTemp() {
+
+        final List<FileSource> list = fileSourceService.list();
+        list.forEach(li->{
+            final String s = downloadRemoteFile(li.getUrl());
+            li.setLocalUrl(s);
+            log.info("图片下载:"+li.getId());
+            fileSourceService.updateById(li);
+        });
+
+    }
+
     private void fillScan(List<Scan> records) {
         List<Newpsnodefile> newpsnodefileList = new ArrayList<>();
         List<Newpspipefile> newpspipefileList = new ArrayList<>();
@@ -426,7 +443,7 @@ public class DataMergeServiceImpl implements DataMergeService {
         //1.查询出node表中的数据的hb_id  存入redis
         List<String> list =
                 psnodeService.listObjs(Wrappers.<Psnode>lambdaQuery()
-                        .select(Psnode::getRemarks).isNotNull(Psnode::getRemarks),n->(String)n);
+                        .select(Psnode::getRemarks).isNotNull(Psnode::getHbId),n->(String)n);
         list.forEach(e->{
             valueOperations.getOperations().opsForHash().put(HB_NODE_FILE_HASH,e,"node");
         });
@@ -435,7 +452,7 @@ public class DataMergeServiceImpl implements DataMergeService {
         //2. 查询出pipe表中的数据  存入redis
         List<String> list2 =
                 pspipeService.listObjs(Wrappers.<Pspipe>lambdaQuery()
-                        .select(Pspipe::getRemarks).isNotNull(Pspipe::getRemarks),n->(String)n);
+                        .select(Pspipe::getRemarks).isNotNull(Pspipe::getHbId),n->(String)n);
         list2.forEach(e->{
             valueOperations.getOperations().opsForHash().put(HB_NODE_FILE_HASH,e,"pipe");
         });

+ 3 - 0
sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/java/com/tofly/dataImport/task/DataPullTask.java

@@ -13,6 +13,9 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 
+/**
+ * 洪波数据拉取接口
+ */
 @EnableScheduling
 @EnableAsync
 @Slf4j

+ 1 - 1
sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/resources/mapper/FileSourceMapper.xml

@@ -14,7 +14,7 @@
             <result property="hbId" column="HB_ID"/>
             <result property="uploader" column="UPLOADER"/>
         <result property="hbScanId" column="hb_scan_id"/>
-            
+        <result property="localUrl" column="local_url"/>
   </resultMap>
   
 </mapper>

+ 1 - 1
sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/resources/mapper/PsnodeMapper.xml

@@ -77,7 +77,7 @@
                       <result property="pbslevelId" column="PBSLEVEL_ID"/>
                       <result property="structureId" column="STRUCTURE_ID"/>
                       <result property="pid" column="PID"/>
-
+      <result property="hbId" column="hb_id"/>
                       <result property="layerName" column="LAYER_NAME"/>
                       <result property="elementCode" column="ELEMENT_CODE"/>
                       

+ 1 - 1
sxgk-projet/sxgk-dataImport/dataImport-boot/src/main/resources/mapper/PspipeMapper.xml

@@ -115,7 +115,7 @@
       <result property="materialCount" column="MATERIAL_COUNT"/>
       <result property="prjId" column="PRJ_ID"/>
       <result property="isPublish" column="IS_PUBLISH" />
-      
+      <result property="hbId" column="hb_id"/>
 
         <!--<association property="elementCodeName"  column="ELEMENT_CODE" select="getelementCodeName" ></association>-->
       <!--<association property="isScan"  column="CODE" select="getIsScan" ></association>-->

+ 6 - 6
sxgk-projet/sxgk/src/main/resources/customMapper/CustomPersonWorkBenchMapper.xml

@@ -604,10 +604,10 @@
             )
         </if>
         <if test="query.beginDate !=null and query. beginDate !=''">
-            <![CDATA[    and a.create_time >= to_date(#{query.beginDate},'yyyymmdd') ]]>
+            <![CDATA[    and a.create_time >= to_date(#{query.beginDate},'yyyy/mm/dd') ]]>
         </if>
         <if test="query.endDate !=null and query. endDate !=''">
-            <![CDATA[   and a.create_time <=  to_date(#{query.endDate},'yyyymmdd') +1]]>
+            <![CDATA[   and a.create_time <=  to_date(#{query.endDate},'yyyy/mm/dd') +1]]>
         </if>
         <if test="query.departmentName !=null and query.departmentName !=''">
             and  (instr(t.BUILD_UNIT,#{query.departmentName})>0
@@ -732,10 +732,10 @@
            and  temp1.prj_id=#{query.prjId}
         </if>
         <if test="query.beginDate !=null and query. beginDate !=''">
-            <![CDATA[    and temp1.create_time >= to_date(#{query.beginDate},'yyyymmdd') ]]>
+            <![CDATA[    and temp1.create_time >= to_date(#{query.beginDate},'yyyy/mm/dd') ]]>
         </if>
         <if test="query.endDate !=null and query. endDate !=''">
-            <![CDATA[   and temp1.create_time <=  to_date(#{query.endDate},'yyyymmdd')+1]]>
+            <![CDATA[   and temp1.create_time <=  to_date(#{query.endDate},'yyyy/mm/dd')+1]]>
         </if>
 
         <if test="query.projectName !=null and query.projectName !=''" >
@@ -825,10 +825,10 @@
         </if>
 
         <if test="query.beginDate !=null and query. beginDate !=''">
-            <![CDATA[    and temp1.create_time >= to_date(#{query.beginDate},'yyyymmdd') ]]>
+            <![CDATA[    and temp1.create_time >= to_date(#{query.beginDate},'yyyy/mm/dd') ]]>
         </if>
         <if test="query.endDate !=null and query. endDate !=''">
-            <![CDATA[   and temp1.create_time <=  to_date(#{query.endDate},'yyyymmdd')+1]]>
+            <![CDATA[   and temp1.create_time <=  to_date(#{query.endDate},'yyyy/mm/dd')+1]]>
         </if>
         group by
         to_char(create_time,'yyyymmdd')