
An In-Depth Example of Parsing GRIB2 Weather Files


For a work requirement I needed to parse forecast weather data out of GRIB files, so let's first look at what this kind of weather file actually is.

GRIB is a gridded data-storage format standardized by the World Meteorological Organization (WMO): each grid point corresponds to a coordinate.

On a map the weather data is laid out as a grid, and a file normally contains a whole set of such grids, one per weather element and forecast time.

Our business, however, needs the data keyed by longitude/latitude, so that all forecast times for a given coordinate can be fetched in a single lookup.

The parsing code therefore no longer saves the data grid by grid (one document per surface); instead it accumulates everything per coordinate point, in the document shape sketched just below.
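Something like this, where the field names match the parsing code further down and the coordinates and values are invented purely for illustration:

import java.util.Arrays;
import org.bson.Document;

public class TargetShapeExample {
    public static void main(String[] args) {
        // One document per grid point: a GeoJSON location plus one value list
        // per weather element, with one entry per forecast step.
        Document doc = new Document()
                .append("loc", new Document("type", "Point")
                        .append("coordinates", Arrays.asList(116.25f, 39.75f)))  // lon, lat
                .append("locStr", "116.25,39.75")
                .append("forecastTime", 3)                          // hours between steps
                .append("refTime", new java.util.Date())            // model reference time
                .append("tmp", Arrays.asList(12.3f, 13.1f, 14.0f))  // temperature per step
                .append("u", Arrays.asList(1.2f, 1.5f, 1.1f))       // u wind component per step
                .append("v", Arrays.asList(0.4f, 0.2f, 0.3f));      // v wind component per step
        System.out.println(doc);
    }
}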

The required Maven dependencies:

<repositories>
    <repository>
        <id>unidata</id>
        <name>THREDDS</name>
        <url>https://artifacts.unidata.ucar.edu/content/repositories/unidata-releases/</url>
    </repository>
</repositories>

<dependencies>
    <dependency>
        <groupId>joda-time</groupId>
        <artifactId>joda-time</artifactId>
        <version>2.3</version>
    </dependency>
    <dependency>
        <groupId>org.glassfish</groupId>
        <artifactId>javax.json</artifactId>
        <version>1.0.3</version>
    </dependency>
    <dependency>
        <groupId>com.lexicalscope.jewelcli</groupId>
        <artifactId>jewelcli</artifactId>
        <version>0.8.8</version>
    </dependency>
    <dependency>
        <groupId>edu.ucar</groupId>
        <artifactId>grib</artifactId>
        <version>4.3.19</version>
    </dependency>
</dependencies>

The parsing code:

package com.kedalo.databus.grib2;

import org.bson.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.kedalo.databus.comm.KafkaClient;
import com.kedalo.databus.comm.MongoClientCommon;
import com.kedalo.databus.content.SpringContext;
import com.kedalo.databus.util.WeatherUtil;

import ucar.grib.grib2.*;
import ucar.unidata.io.RandomAccessFile;

import java.io.*;
import java.text.SimpleDateFormat;
import java.util.*;
  

/**
 * @author liyulong
 * Date 2019-09-17
 * Parses GRIB2 files and writes the per-coordinate documents to MongoDB.
 */
public final class Grib2Json{

    private static final Logger log = LoggerFactory.getLogger(Grib2Json.class);
    private static final Logger gribLog = LoggerFactory.getLogger("grib");
    private final File file;
    private final Options option;
	SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
    MongoClientCommon mongoClient = (MongoClientCommon) SpringContext.getBean(MongoClientCommon.class);

    public Grib2Json(File file, Options option) {
        if (!file.exists()) {
        	log.error("Cannot find input file {}", file);
            throw new IllegalArgumentException("Cannot find input file: " + file);
        }
        this.file = file;
        this.option = option;
    }

  
    private void readOther(RandomAccessFile raf, Grib2Input input, Options options, String table, String topic, KafkaClient kafka) throws IOException {
    	gribLog.info("===================== Reading file {} =====================", options.getFile().getName());
    	Date now = new Date();
        List<Grib2Record> records = input.getRecords();
        int page = records.size()/2;  // half the record count; see the note in read() about getEdition()
        float startLon = 0;  // starting longitude
        float startLat = 0;  // starting latitude
        float x = 0;         // latitude step per grid row (from dx)
        float y = 0;         // longitude step per grid column (from dy)
        float endLon = 0;    // ending longitude
        float endLat = 0;    // ending latitude
        int forecast = 0;    // forecast interval in hours
        float tempLon = 0;   // running longitude
        float tempLat = 0;   // running latitude
        Map<String,Document> map = new HashMap<String, Document>();  // accumulates all documents, keyed by "lon,lat"
        for (int i = 0; i < page; i++) {  // iterate over the grids
        	Grib2Record temp = records.get(i);
        	GribRecord rw = new GribRecord(temp, options);
        	// read the data values of this record
        	float[] data = new Grib2Data(raf).getData(temp.getGdsOffset(), temp.getPdsOffset(), temp.getId().getRefTime());
        	Map<String,Object> headMap = rw.getHead();
        	// starting longitude/latitude
        	startLon = WeatherUtil.toFloat((float)headMap.get("lo1"));
        	startLat = WeatherUtil.toFloat((float)headMap.get("la1"));
        	// grid step along each axis
        	x = WeatherUtil.toFloat((float) headMap.get("dx"));
        	y = WeatherUtil.toFloat((float) headMap.get("dy"));
        	// ending longitude/latitude
        	endLat = WeatherUtil.toFloat((float) headMap.get("la2"));
        	endLon = WeatherUtil.toFloat((float) headMap.get("lo2"));
        	// forecast interval, taken from the first record
        	if(forecast <= 0){
        		forecast = (int)headMap.get("forecastTime");
        	}
        	// reference (analysis) time
        	Object refTime = headMap.get("refTime");
        	// parameter number identifies the weather element
        	int number = (int)headMap.get("parameterNumber");
        	// number of grid points along longitude and latitude
        	int lonCount = WeatherUtil.scale(WeatherUtil.sub(endLon, startLon)/y, 0).intValue();
            int latCount = WeatherUtil.scale(WeatherUtil.sub(endLat, startLat)/x, 0).intValue();
            // walk the grid points
            tempLat = startLat;
            int count = 0;
            for (int j = 0; j <= latCount; j++) {
        		tempLon = startLon;
        		for (int j2 = 0; j2 <= lonCount; j2++) {
        			tempLon = WeatherUtil.format(tempLon);
                	tempLat =  WeatherUtil.format(tempLat);
        			String key = tempLon+","+tempLat;
        			String mongoKey = getByKey(number);
        			Document doc = map.get(key);
        			boolean flg = false;
        			if(doc == null ){  // does this coordinate already exist in the pool?
        				flg = true;
        			}
        			if(flg){  // no document yet for this point: create one; otherwise append the value
        				doc =  new Document();
        				Map<String, Object> documentMap = new HashMap<String, Object>(); 
             			documentMap.put("type", "Point");
             			List<Float> list = new ArrayList<Float>();
             			list.add(tempLon);
             			list.add(tempLat);
             			documentMap.put("coordinates",list);
                     	doc.append("loc",documentMap);
                     	doc.append("locStr",tempLon+","+tempLat);
                     	doc.append("forecastTime",forecast).append("refTime", refTime);
                     	doc.append("saveTime", sdf.format(now));
                     	List<Float> dataList = new ArrayList<Float>();
                     	dataList.add(data[count]);
                     	doc.put(mongoKey,dataList);
                     	map.put(key, doc);
        			}else{
        				((List)doc.get(mongoKey)).add(data[count]);
        			}
        			count++;
        			tempLon += y;
				}
        		tempLat +=x;
			}
        }
        List<Document> dataList = new ArrayList<Document>();
        for (String str : map.keySet()) {
        	Document doc =  map.get(str);
        	dataList.add(doc);
        }
       
        String dataName = table+"-"+forecast;
        gribLog.info("Dropping collection {}", dataName);
        mongoClient.drop(dataName);
        gribLog.info("Inserting data into {} ...", dataName);
        mongoClient.insertMany(dataName, dataList);
        gribLog.info("Finished inserting into {}", dataName);
        gribLog.info("Creating index on {}", dataName);
        mongoClient.createIndex(dataName, "loc", "2dsphere");
        gribLog.info("Finished reading file {}: {} documents saved.", options.getFile().getName(), map.size());
    }
    
    public void readEDA10(RandomAccessFile raf, Grib2Input input, Options options,String table,String topic,KafkaClient kafka) throws IOException{
    	gribLog.info("===================== Reading file {} =====================", options.getFile().getName());
    	Date now = new Date();
        List<Grib2Record> records = input.getRecords();
        int page = records.size()/2;  // half the record count; see the note in read() about getEdition()
        float startLon = 0;  // starting longitude
        float startLat = 0;  // starting latitude
        float x = 0;         // latitude step per grid row (from dx)
        float y = 0;         // longitude step per grid column (from dy)
        float endLon = 0;    // ending longitude
        float endLat = 0;    // ending latitude
        int forecast = 0;    // forecast interval in hours
        float tempLon = 0;   // running longitude
        float tempLat = 0;   // running latitude
        Map<String,Document> map = new LinkedHashMap<String, Document>();  // accumulates all documents, keyed by "lon,lat"
        for (int i = 0; i < page; i++) {  // iterate over the grids
        	Grib2Record temp = records.get(i);
        	GribRecord rw = new GribRecord(temp, options);
        	// read the data values of this record
        	float[] data = new Grib2Data(raf).getData(temp.getGdsOffset(), temp.getPdsOffset(), temp.getId().getRefTime());
        	Map<String,Object> headMap = rw.getHead();

        	// starting longitude/latitude
        	startLon = WeatherUtil.toFloat((float)headMap.get("lo1"));
        	startLat = WeatherUtil.toFloat((float)headMap.get("la1"));
        	// grid step along each axis
        	x = WeatherUtil.toFloat((float) headMap.get("dx"));
        	y = WeatherUtil.toFloat((float) headMap.get("dy"));
        	// ending longitude/latitude
        	endLat = WeatherUtil.toFloat((float) headMap.get("la2"));
        	endLon = WeatherUtil.toFloat((float) headMap.get("lo2"));
        	// flip the step sign if the grid is scanned in the opposite direction
        	if(endLat < startLat ){
        		x = -x;
        	}
        	if(endLon < startLon){
        		y = -y;
        	}
        	// forecast interval, taken from the first record
        	if(forecast <= 0){
        		forecast = (int)headMap.get("forecastTime");
        	}
        	// reference (analysis) time
        	Object refTime = headMap.get("refTime");
        	// parameter number identifies the weather element
        	int number = (int)headMap.get("parameterNumber");
        	// number of grid points along longitude and latitude
        	int lonCount = Math.abs(WeatherUtil.scale(WeatherUtil.sub(endLon, startLon)/Math.abs(y), 0).intValue());
            int latCount = Math.abs(WeatherUtil.scale(WeatherUtil.sub(endLat, startLat)/Math.abs(x), 0).intValue());
            // walk the grid points
            tempLat = startLat;
            int count = 0;
         	for (int j = 0; j <= latCount; j++) {
        		
        		tempLon = startLon;
        		for (int j2 = 0; j2 <= lonCount; j2++) {
        			tempLon = WeatherUtil.format(tempLon);
                	tempLat =  WeatherUtil.format(tempLat);
        			String key = tempLon+","+tempLat;
        			String mongoKey = getByKey(number);
        			Document doc = map.get(key);
        			boolean flg = false;
        			if(doc != null ){  // a document already exists for this coordinate
        				if(doc.get("v") == null && doc.get("u") == null){
        					flg = true;
        				}else{
        					List attrDoc = doc.get("v",List.class);
							if (attrDoc == null){
								attrDoc = doc.get("u",List.class);
							}
							List list = doc.get(mongoKey, List.class);
							if(list == null){
								list = new ArrayList<Float>();
							}
							list.add(data[count]);
	                     	doc.put(mongoKey, list);
        				}
        			}else{
        				flg = true;
        			}
        			if(flg){  // no document yet for this point: create one
        				doc =  new Document();
        				Map<String, Object> documentMap = new HashMap<String, Object>(); 
             			documentMap.put("type", "Point");
             			List<Float> list = new ArrayList<Float>();
             			list.add(tempLon);
             			list.add(tempLat);
             			documentMap.put("coordinates",list);
                     	doc.append("loc",documentMap);
                     	doc.append("locStr",tempLon+","+tempLat);
                     	doc.append("forecastTime",forecast).append("refTime", refTime);
                     	doc.append("saveTime", sdf.format(now));
                     	List<Float> dataList = new ArrayList<Float>();
                     	dataList.add(data[count]);
                     	doc.put(mongoKey,dataList);
                     	map.put(key, doc);
        			}
        			tempLon += y;
        			count++;
				}
        		tempLat +=x;
			}
        }
        String dataName = table+"-"+forecast;
        gribLog.info("Inserting data into {} ...", dataName);
        mongoClient.insertManyOfUpdate(dataName, map, "locStr");
        gribLog.info("Finished reading file {}: {} documents saved.", options.getFile().getName(), map.size());
    }
    

    
    
    
    
    
    public String getByKey(int key){
    	switch (key) {
    		case 0:
    			return "tmp";   // temperature
    		case 1:
    			return "tc";    // total cloud cover
    		case 2:
    			return "u";     // u-component of wind
    		case 3:
    			return "v";     // v-component of wind
    		case 4:
    			return "maxt";  // maximum temperature
    		case 5:
    			return "mint";  // minimum temperature
    		case 8:
    			return "tr";    // total precipitation
    		case 11:
    			return "other";
    		case 19:
    			return "pt";    // precipitation type
    		default:
    			return "unknown";
		}
    }
    
    
 
    public void read(String table,String topic,KafkaClient kafka,String type) throws IOException {
        RandomAccessFile raf = new RandomAccessFile(file.getPath(), "r");
        raf.order(RandomAccessFile.BIG_ENDIAN);
        Grib2Input input = new Grib2Input(raf);
        /*
         * If input.getEdition() is called first and the file really is GRIB2,
         * the record count does not need to be halved later; but getEdition()
         * scans the file once, so the scan() below then blocks for a while.
         * If it is skipped and the file is GRIB2, the record count can simply
         * be divided by 2 after scan() (which is what readOther/readEDA10 do).
         */
//        input.getEdition();
        input.scan(false, false);
        if(type.equals("EDA10") ){
        	readEDA10(raf, input, option,table,topic,kafka);
        }else{
        	readOther(raf, input, option,table,topic,kafka);
        }
    }

}
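Grib2Json and GribRecord lean on a small in-house WeatherUtil helper that is not shown in this article. The sketch below is only a guess at what those helpers might look like, reconstructed from how they are called above (BigDecimal-based arithmetic so the accumulated lat/lon steps do not drift); the real class may well differ, and its codeToStr(centerId) lookup is omitted here.

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Calendar;
import java.util.Date;

// Hypothetical reconstruction of the helpers used by Grib2Json/GribRecord.
public class WeatherUtil {

    // Normalize a float read from the GRIB header.
    public static float toFloat(float value) {
        return new BigDecimal(Float.toString(value)).floatValue();
    }

    // Exact subtraction to avoid binary floating-point error.
    public static float sub(float a, float b) {
        return new BigDecimal(Float.toString(a))
                .subtract(new BigDecimal(Float.toString(b)))
                .floatValue();
    }

    // Round to the given number of decimal places.
    public static BigDecimal scale(float value, int places) {
        return new BigDecimal(Float.toString(value)).setScale(places, RoundingMode.HALF_UP);
    }

    // Keep running coordinates at a fixed precision so map keys stay stable.
    public static float format(float value) {
        return scale(value, 2).floatValue();
    }

    // Shift a date forward by the forecast offset in hours.
    public static Date addHourTime(Date date, int hours) {
        Calendar c = Calendar.getInstance();
        c.setTime(date);
        c.add(Calendar.HOUR_OF_DAY, hours);
        return c.getTime();
    }
}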
package com.kedalo.databus.grib2;

import javax.json.JsonNumber;
import java.math.BigDecimal;
import java.math.BigInteger;


/**
 * 2014-01-17
 * @author liyulong
 */
final class FloatValue implements JsonNumber {

    private final float value;
    private BigDecimal bd;

    FloatValue(float value) {
        this.value = value;
    }

    @Override 
    public ValueType getValueType() {
        return ValueType.NUMBER;
    }

    @Override 
    public String toString() {
        if (Float.isNaN(value)) {
            return "\"NaN\"";
        }
        else if (value == Float.POSITIVE_INFINITY) {
            return "\"Infinity\"";
        }
        else if (value == Float.NEGATIVE_INFINITY) {
            return "\"-Infinity\"";
        }
        else {
            return Float.toString(value);
        }
    }

    @Override 
    public boolean isIntegral() {
        return bigDecimalValue().scale() == 0;
    }

    @Override 
    public int intValue() {
        return (int)value;
    }

    @Override 
    public int intValueExact() {
        return bigDecimalValue().intValueExact();
    }

    @Override 
    public long longValue() {
        return (long)value;
    }

    @Override 
    public long longValueExact() {
        return bigDecimalValue().longValueExact();
    }

    @Override 
    public BigInteger bigIntegerValue() {
        return bigDecimalValue().toBigInteger();
    }

    @Override 
    public BigInteger bigIntegerValueExact() {
        return bigDecimalValue().toBigIntegerExact();
    }

    @Override 
    public double doubleValue() {
        return (double)value;
    }

    @Override 
    public BigDecimal bigDecimalValue() {
        return bd != null ? bd : (bd = new BigDecimal(value));
    }

    @Override 
    public boolean equals(Object that) {
        return that instanceof JsonNumber && this.bigDecimalValue().equals(((JsonNumber)that).bigDecimalValue());
    }

    @Override 
    public int hashCode() {
        return bigDecimalValue().hashCode();
    }
}
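FloatValue exists so that NaN and infinite grid values can still be rendered (as quoted strings) instead of producing invalid JSON. A quick illustration of its toString behaviour; since the class is package-private, this demo has to live in the same package:

package com.kedalo.databus.grib2;

public class FloatValueDemo {
    public static void main(String[] args) {
        System.out.println(new FloatValue(21.5f));                   // 21.5
        System.out.println(new FloatValue(Float.NaN));               // "NaN"
        System.out.println(new FloatValue(Float.POSITIVE_INFINITY)); // "Infinity"
    }
}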
package com.kedalo.databus.grib2;

import static ucar.grib.GribNumbers.BIT_5;
import static ucar.grib.GribNumbers.UNDEFINED;
import static ucar.grib.GribNumbers.isBitSet;
import static ucar.grib.grib2.Grib2Tables.codeTable3_1;
import static ucar.grib.grib2.Grib2Tables.codeTable3_2;
import static ucar.grib.grib2.Grib2Tables.codeTable4_0;
import static ucar.grib.grib2.Grib2Tables.codeTable4_3;
import static ucar.grib.grib2.Grib2Tables.codeTable4_5;
import static ucar.grib.grib2.ParameterTable.getCategoryName;
import static ucar.grib.grib2.ParameterTable.getParameterName;
import static ucar.grib.grib2.ParameterTable.getParameterUnit;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.joda.time.DateTime;

import com.kedalo.databus.util.WeatherUtil;

import ucar.grib.grib2.Grib2Data;
import ucar.grib.grib2.Grib2GDSVariables;
import ucar.grib.grib2.Grib2IdentificationSection;
import ucar.grib.grib2.Grib2IndicatorSection;
import ucar.grib.grib2.Grib2Pds;
import ucar.grib.grib2.Grib2Record;
/**
 * @author liyulong
 * @date 2019-09-17
 * Wraps a single GRIB2 record and exposes its header/grid metadata.
 */
public class GribRecord {
	
	  private final Grib2Record record;
	  private final Grib2IndicatorSection ins;
	  private final Options options;
	  private final Grib2IdentificationSection ids;
	  private final Grib2Pds pds;
	  private final Grib2GDSVariables gds;
	  
	 
	  
	  public GribRecord(Grib2Record record, Options options) {
	        this.record = record;
	        this.options = options;
	        this.ins = record.getIs();
	        this.ids = record.getId();
	        this.pds = record.getPDS().getPdsVars();
	        this.gds = record.getGDS().getGdsVars();
	 }
	  
	 
	 public Map<String,Object> getHead(){
		 int productDef = pds.getProductDefinitionTemplate();
	     int discipline = ins.getDiscipline();
	     int paramCategory = pds.getParameterCategory();
	     int paramNumber = pds.getParameterNumber();
	     int gridTemplate = gds.getGdtn();
		 Map<String,Object> recordData = new HashMap<String,Object>();
		 // discipline
		 recordData.put("discipline", ins.getDiscipline());
		 recordData.put("disciplineName", ins.getDisciplineName());
		 // GRIB edition
		 recordData.put("gribEdition", ins.getGribEdition());
		 // originating center (a fixed value, 38, for these files)
		 recordData.put("center", ids.getCenter_id());
		 recordData.put("centerName",WeatherUtil.codeToStr(ids.getCenter_id()));
		 // sub-center
		 recordData.put("subcenter", ids.getSubcenter_id());

		 recordData.put("timeLong", ids.getRefTime());
		 recordData.put("refTime", new Date(ids.getRefTime()));
//		 recordData.put("refTime", new DateTime(ids.getRefTime()).toDate());
		 recordData.put("time", WeatherUtil.addHourTime(new DateTime(ids.getRefTime()).toDate(), pds.getForecastTime()));
		 recordData.put("significanceOfRT", ids.getSignificanceOfRT());
		 recordData.put("significanceOfRTName", ids.getSignificanceOfRTName());
		 recordData.put("productStatus", ids.getProductStatus());
		 recordData.put("productStatusName", ids.getProductStatusName());
		 recordData.put("productType", ids.getProductType());
		 recordData.put("productTypeName", ids.getProductTypeName());
		 recordData.put("productDefinitionTemplate", productDef);
		 recordData.put("productDefinitionTemplateName", codeTable4_0(productDef));
		 recordData.put("parameterCategory", paramCategory);
		 recordData.put("parameterCategoryName",getCategoryName(discipline, paramCategory));
		 recordData.put("parameterNumber", paramNumber);
		 recordData.put("parameterNumberName",getParameterName(discipline, paramCategory, paramNumber));
		 recordData.put("parameterUnit", getParameterUnit(discipline, paramCategory, paramNumber));
		 recordData.put("genProcessType", pds.getGenProcessType());
		 recordData.put("genProcessTypeName", codeTable4_3(pds.getGenProcessType()));
		 recordData.put("forecastTime", pds.getForecastTime());
		 recordData.put("surface1Type", pds.getLevelType1());
		 recordData.put("surface1TypeName", codeTable4_5(pds.getLevelType1()));
		 recordData.put("surface1Value", pds.getLevelValue1());
		 recordData.put("surface2Type", pds.getLevelType2());
		 recordData.put("surface2TypeName",codeTable4_5(pds.getLevelType2()));
		 recordData.put("surface2Value", pds.getLevelValue2());
		 recordData.put("gridDefinitionTemplate", gridTemplate);
		 recordData.put("gridDefinitionTemplateName",codeTable3_1(gridTemplate));
		 recordData.put("numberPoints", gds.getNumberPoints());
		 switch (gridTemplate) {
	            case 0:  // Template 3.0
	            case 1:  // Template 3.1
	            case 2:  // Template 3.2
	            case 3:  // Template 3.3
	            	writeLonLatGrid(recordData);
	            break;
	            case 10:  // Template 3.10
	                writeMercatorGrid(recordData);
	                break;
	            case 20:  // Template 3.20
	                writePolarStereographicGrid(recordData);
	                break;
	            case 30:  // Template 3.30
	                writeLambertConformalGrid(recordData);
	                break;
	            case 40:  // Template 3.40
	            case 41:  // Template 3.41
	            case 42:  // Template 3.42
	            case 43:  // Template 3.43
	                writeLonLatGrid(recordData);
	                break;
	            case 90:  // Template 3.90
	                writeSpaceOrOrthographicGrid(recordData);
	                break;
	            case 204:  // Template 3.204
	                writeCurvilinearGrid(recordData);
	                break;
	        }
	        
		 return recordData;
	    
	}
	 private void writeCurvilinearGrid(Map<String,Object> recordData) {
	        writeGridShape(recordData);
	        writeGridSize(recordData);
	}
	 
	 private void writeSpaceOrOrthographicGrid(Map<String,Object> recordData) {
	        writeGridShape(recordData);
	        writeGridSize(recordData);
	        writeAngle(recordData);
	        writeLonLatBounds(recordData);
	        recordData.put("lop", gds.getLop()); 
	        recordData.put("lap", gds.getLap()); 
	        recordData.put("xp", gds.getXp());    
	        recordData.put("yp", gds.getYp());   
	        recordData.put("nr", gds.getNr());   
	        recordData.put("xo", gds.getXo());   
	        recordData.put("yo", gds.getYo());   
	 }
	 
	 private void writeGridShape(Map<String,Object> recordData) {
		  recordData.put("shape",codeTable3_2(gds.getShape()));
		  recordData.put("shapeName", codeTable3_2(gds.getShape()));
	        switch (gds.getShape()) {
	            case 1:
	            	 recordData.put("earthRadius", gds.getEarthRadius());
	                break;
	            case 3: 
	            	 recordData.put("majorAxis", gds.getMajorAxis());
	            	 recordData.put("minorAxis", gds.getMinorAxis());
	                break;
	        }
	 }
	 
	 private void writeGridSize(Map<String,Object> recordData) {
		 recordData.put("gridUnits", gds.getGridUnits());
		 recordData.put("resolution", gds.getResolution());
		 recordData.put("winds", isBitSet(gds.getResolution(), BIT_5) ? "relative" : "true");
		 recordData.put("scanMode", gds.getScanMode());
		 recordData.put("nx", gds.getNx());  
		 recordData.put("ny", gds.getNy());  
	}
	
	private void writeAngle(Map<String,Object> recordData) {
		recordData.put("angle", gds.getAngle());
		recordData.put("basicAngle", gds.getBasicAngle());
		recordData.put("subDivisions", gds.getSubDivisions());
	}
	 
	
	  
	  private void putIfSet(Map<String,Object> recordData,String key,float value){
		    if ( value != UNDEFINED) {
		    	recordData.put(key, value);
	        }
	  }
	  
	  private void writeLonLatBounds(Map<String,Object> recordData) {
		  putIfSet(recordData,"lo1",gds.getLo1());
		  putIfSet(recordData,"la1",gds.getLa1());
		  putIfSet(recordData,"lo2",gds.getLo2());
		  putIfSet(recordData,"la2",gds.getLa2());
		  putIfSet(recordData,"dx",gds.getDx());
		  putIfSet(recordData,"dy",gds.getDy());
		  	
	    }
	  
	  private void writeRotationAndStretch(Map<String,Object> recordData) {
		  putIfSet(recordData,"spLon", gds.getSpLon()); 
		  putIfSet(recordData,"spLat", gds.getSpLat());
		  putIfSet(recordData,"rotationAngle", gds.getRotationAngle());
		  putIfSet(recordData,"poleLon", gds.getPoleLon()); 
		  putIfSet(recordData,"poleLat", gds.getPoleLat()); 
		  putIfSet(recordData,"stretchingFactor", gds.getStretchingFactor());
	 }
	  
	 private void writeLonLatGrid(Map<String,Object> recordData) {
		 writeGridShape(recordData);
		 writeGridSize(recordData);
		 writeAngle(recordData);
		 writeLonLatBounds(recordData);
		 writeRotationAndStretch(recordData);
		 putIfSet(recordData,"np", gds.getNp());  
	 }
	  
	 private void writeMercatorGrid(Map<String,Object> recordData) {
		 writeGridShape(recordData);
	     writeGridSize(recordData);
	     writeAngle(recordData);
	     writeLonLatBounds(recordData);
	}
	 private void writePolarStereographicGrid(Map<String,Object> recordData) {
		 writeGridShape(recordData);
		 writeGridSize(recordData);
		 writeLonLatBounds(recordData);
	 }
	 private void writeLambertConformalGrid(Map<String,Object> recordData) {
		 writeGridShape(recordData);
	     writeGridSize(recordData);
	     writeLonLatBounds(recordData);
	     writeRotationAndStretch(recordData);
	     recordData.put("laD", gds.getLaD());
	     recordData.put("loV", gds.getLoV());
	     recordData.put("projectionFlag", gds.getProjectionFlag());
	     recordData.put("latin1", gds.getLatin1()); 
	     recordData.put("latin2", gds.getLatin2());  
	}
	 
	 List<FloatValue> readData(Grib2Data gd) throws IOException {
		 List<FloatValue> list = new ArrayList<FloatValue>();
		 float[] data = gd.getData(record.getGdsOffset(), record.getPdsOffset(), ids.getRefTime());
		 if (data != null) {
			 for (float value : data) {
				 list.add(new FloatValue(value));
			 }
	     }
		 return list;
	}
	
}
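getHead() is where readOther/readEDA10 get all of their grid geometry (lo1/la1, lo2/la2, dx/dy, forecastTime, parameterNumber). Below is a minimal sketch for dumping those header fields from a file; the path /data/sample.grb2 is just a placeholder:

import java.util.List;
import java.util.Map;

import com.kedalo.databus.grib2.GribRecord;

import ucar.grib.grib2.Grib2Input;
import ucar.grib.grib2.Grib2Record;
import ucar.unidata.io.RandomAccessFile;

public class HeadDump {
    public static void main(String[] args) throws Exception {
        RandomAccessFile raf = new RandomAccessFile("/data/sample.grb2", "r");
        raf.order(RandomAccessFile.BIG_ENDIAN);
        Grib2Input input = new Grib2Input(raf);
        input.scan(false, false);
        List<Grib2Record> records = input.getRecords();
        for (Grib2Record rec : records) {
            // getHead() does not touch the Options argument, so null is fine here.
            Map<String, Object> head = new GribRecord(rec, null).getHead();
            System.out.println(head.get("parameterNumberName")
                    + " +" + head.get("forecastTime") + "h"
                    + ", grid " + head.get("nx") + "x" + head.get("ny")
                    + ", from (" + head.get("lo1") + "," + head.get("la1") + ")"
                    + ", step (" + head.get("dx") + "," + head.get("dy") + ")");
        }
        raf.close();
    }
}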
package com.kedalo.databus.grib2;

import com.lexicalscope.jewel.cli.*;

import java.io.File;


/**
 * @date 2019-09-17
 * @author liyulong
 * Command-line options. Only partly wired up for now; can be extended later.
 */
@CommandLineInterface(application="grib2json", order=OptionOrder.LONGNAME)
public interface Options {

    @Option(longName="help", shortName="h", description="帮助命令")
    boolean getShowHelp();

    @Option(longName="names", shortName="n", description="打印返回字段名称")
    boolean getPrintNames();

    @Option(longName="data", shortName="d", description="打印数据字段")
    boolean getPrintData();

    @Option(longName="compact", shortName="c", description="转化为JSON格式")
    boolean isCompactFormat();

    @Option(longName="verbose", shortName="v", description="启动日志记录")
    boolean getEnableLogging();

    @Option(
        longName="output",
        shortName="o",
        description="输出命令  例:-o 文件路径",
        defaultToNull=true)
    File getOutput();

    @Unparsed(name="FILE", defaultToNull=true)
    File getFile();
    @Option(
        longName={"filter.discipline", "fd"},
        description="行业过滤起 暂时无用",
        defaultToNull=true)
    Integer getFilterDiscipline();

    @Option(
        longName={"filter.category", "fc"},
        description="类型过滤器 暂时无用",
        defaultToNull=true)
    Integer getFilterCategory();

    @Option(
        longName={"filter.parameter", "fp"},
        description="参数过滤器  暂时无用",
        defaultToNull=true)
    String getFilterParameter();

    @Option(
        longName={"filter.surface", "fs"},
        description="surface 字段暂时无用",
        defaultToNull=true)
    Integer getFilterSurface();

    @Option(
        longName={"filter.value", "fv"},
        description="过滤 surface 内容 暂时无用",
        defaultToNull=true)
    Double getFilterValue();

   
}
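Finally, a rough sketch of how the pieces could be driven end to end: jewelcli builds the Options instance from the command line and Grib2Json does the work. The collection prefix, topic name, and arguments below are illustrative only, the Kafka client can be null because the two read methods above never use it, and this has to run inside the application's Spring context so that SpringContext.getBean can resolve the MongoDB client.

import com.kedalo.databus.grib2.Grib2Json;
import com.kedalo.databus.grib2.Options;
import com.lexicalscope.jewel.cli.CliFactory;

public class Grib2JsonDriver {
    public static void main(String[] args) throws Exception {
        // e.g. args = { "-o", "/tmp/out.json", "/data/sample.grb2" }
        Options options = CliFactory.parseArguments(Options.class, args);
        Grib2Json parser = new Grib2Json(options.getFile(), options);
        // "forecast" (collection prefix) and "weather-topic" are placeholders;
        // pass "EDA10" as the type argument to use readEDA10 instead of readOther.
        parser.read("forecast", "weather-topic", null, "OTHER");
    }
}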

 
