深入解析气象grb2文件的实例
最编程
2024-01-09 07:59:41
...
因为工作需求,需要解析grb 文件中的预报气象数据先来了解一下气象文件是什么东西
grb格式文件是国际气象组织的一种独特的数据存储格式,是一种网格的数据格式,每个格点代表着一个坐标,类似这种
在地图上以网格状的形式表达气象数据,通常这个网格数据都是一组,类似这样的
但是因为业务需要,数据存储格式是以经纬度为主键,获取这个经纬度下的所有时间数据,图解如下
所以解析代码不再是一个面一个面保存的
所需要导入的包MAVEN
<repositories>
<repository>
<id>unidata</id>
<name>THREDDS</name>
<url>https://artifacts.unidata.ucar.edu/content/repositories/unidata-releases/</url>
</repository>
</repositories>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>2.3</version>
</dependency>
<dependency>
<groupId>org.glassfish</groupId>
<artifactId>javax.json</artifactId>
<version>1.0.3</version>
</dependency>
<dependency>
<groupId>com.lexicalscope.jewelcli</groupId>
<artifactId>jewelcli</artifactId>
<version>0.8.8</version>
</dependency>
<dependency>
<groupId>edu.ucar</groupId>
<artifactId>grib</artifactId>
<version>4.3.19</version>
</dependency>
解析代码
package com.kedalo.databus.grib2;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import com.alibaba.fastjson.JSON;
import com.kedalo.databus.comm.KafkaClient;
import com.kedalo.databus.comm.MongoClientCommon;
import com.kedalo.databus.content.SpringContext;
import com.kedalo.databus.util.WeatherUtil;
import com.lexicalscope.jewel.cli.CliFactory;
import com.mongodb.client.model.Filters;
import ucar.grib.grib2.*;
import ucar.unidata.io.RandomAccessFile;
import java.io.*;
import java.text.SimpleDateFormat;
import java.util.*;
import javax.annotation.Resource;
import javax.print.Doc;
/**
* @author liyulong
* Date 2019-09-17
* 解析Grib文件
*/
public final class Grib2Json{
private static final Logger log = LoggerFactory.getLogger(Grib2Json.class);
private static final Logger gribLog = LoggerFactory.getLogger("grib");
private final File file;
private final Options option;
SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
MongoClientCommon mongoClient = (MongoClientCommon) SpringContext.getBean(MongoClientCommon.class);
public Grib2Json(File file, Options option) {
if (!file.exists()) {
log.error("Cannot find input file {0}",file);
throw new IllegalArgumentException("Cannot find input file: " + file);
}
this.file = file;
this.option = option;
}
private void readOther(RandomAccessFile raf, Grib2Input input, Options options,String table,String topic,KafkaClient kafka) throws IOException {
gribLog.info("=====================读取{}文件=============================",options.getFile().getName());
Date now = new Date();
List<Grib2Record> records = input.getRecords();
int page = records.size()/2;//获取页数
float startLon = 0;//开始经度
float startLat = 0;//开始纬度
float x = 0;//纬度每次变化的值
float y = 0;//经度每次变化的值
float endLon=0;//结束的经度
float endLat=0;//结束的纬度
int forecast = 0;//每多少小时
float tempLon = 0;//递增坐标点
float tempLat = 0;//递增坐标点
Map<String,Document> map = new HashMap<String, Document>();//所有数据存储
for (int i = 0; i < page; i++) {//遍历页数
Grib2Record temp = records.get(i);
GribRecord rw = new GribRecord(temp, options);
//获取data 数据
float[] data = new Grib2Data(raf).getData(temp.getGdsOffset(), temp.getPdsOffset(), temp.getId().getRefTime());
Map<String,Object> headMap = rw.getHead();
//获取开始经纬度
startLon = WeatherUtil.toFloat((float)headMap.get("lo1"));
startLat = WeatherUtil.toFloat((float)headMap.get("la1"));
//获取每页递增的经纬度距离
x = WeatherUtil.toFloat((float) headMap.get("dx"));
y = WeatherUtil.toFloat((float) headMap.get("dy"));
//计算结束经纬度坐标
endLat = WeatherUtil.toFloat((float) headMap.get("la2"));
endLon = WeatherUtil.toFloat((float) headMap.get("lo2"));
//获取第一次间隔时间
if(forecast <= 0){
forecast = (int)headMap.get("forecastTime");
}
//获取起始检测时间
Object refTime = headMap.get("refTime");
//获取数据类型
int number = (int)headMap.get("parameterNumber");
//计算经纬度个栅状格点X和Y
int lonCount = WeatherUtil.scale(WeatherUtil.sub(endLon, startLon)/y, 0).intValue();
int latCount = WeatherUtil.scale(WeatherUtil.sub(endLat, startLat)/x, 0).intValue();
//遍历格点数据
tempLat = startLat;
int count = 0;
for (int j = 0; j <= latCount; j++) {
tempLon = startLon;
for (int j2 = 0; j2 <= lonCount; j2++) {
tempLon = WeatherUtil.format(tempLon);
tempLat = WeatherUtil.format(tempLat);
String key = tempLon+","+tempLat;
String mongoKey = getByKey(number);
Document doc = map.get(key);
boolean flg = false;
if(doc == null ){//判断数据池里是否存在相同坐标点
flg = true;
}
if(flg){//没有数据则添加数据有就修改
doc = new Document();
Map<String, Object> documentMap = new HashMap<String, Object>();
documentMap.put("type", "Point");
List<Float> list = new ArrayList<Float>();
list.add(tempLon);
list.add(tempLat);
documentMap.put("coordinates",list);
doc.append("loc",documentMap);
doc.append("locStr",tempLon+","+tempLat);
doc.append("forecastTime",forecast).append("refTime", refTime);
doc.append("saveTime", sdf.format(now));
List<Float> dataList = new ArrayList<Float>();
dataList.add(data[count]);
doc.put(mongoKey,dataList);
map.put(key, doc);
}else{
((List)doc.get(mongoKey)).add(data[count]);
}
count++;
tempLon += y;
}
tempLat +=x;
}
}
List<Document> dataList = new ArrayList<Document>();
for (String str : map.keySet()) {
Document doc = map.get(str);
dataList.add(doc);
}
String dataNmae = table+"-"+forecast;
gribLog.info("清空{}数据",dataNmae);
mongoClient.drop(dataNmae);
gribLog.info("{}存储数据中....",dataNmae);
mongoClient.insertMany(dataNmae, dataList);
gribLog.info("{}存储完毕",dataNmae);
gribLog.info("{}创建索引",dataNmae);
mongoClient.createIndex(dataNmae, "loc", "2dsphere");
gribLog.info("读取{}文件 共计{}条数据保存 完毕!",options.getFile().getName(),map.size());
}
public void readEDA10(RandomAccessFile raf, Grib2Input input, Options options,String table,String topic,KafkaClient kafka) throws IOException{
gribLog.info("=====================读取{}文件=============================",options.getFile().getName());
Date now = new Date();
List<Grib2Record> records = input.getRecords();
int page = records.size()/2;//获取页数
float startLon = 0;//开始经度
float startLat = 0;//开始纬度
float x = 0;//纬度每次变化的值
float y = 0;//经度每次变化的值
float endLon=0;//结束的经度
float endLat=0;//结束的纬度
int forecast = 0;//每多少小时
float tempLon = 0;//递增坐标点
float tempLat = 0;//递增坐标点
Map<String,Document> map = new LinkedHashMap<String, Document>();//所有数据存储
for (int i = 0; i < page; i++) {//遍历页数
Grib2Record temp = records.get(i);
GribRecord rw = new GribRecord(temp, options);
//获取data 数据
float[] data = new Grib2Data(raf).getData(temp.getGdsOffset(), temp.getPdsOffset(), temp.getId().getRefTime());
Map<String,Object> headMap = rw.getHead();
//获取开始经纬度
startLon = WeatherUtil.toFloat((float)headMap.get("lo1"));
startLat = WeatherUtil.toFloat((float)headMap.get("la1"));
//获取每页递增的经纬度距离
x = WeatherUtil.toFloat((float) headMap.get("dx"));
y = WeatherUtil.toFloat((float) headMap.get("dy"));
//计算结束经纬度坐标
endLat = WeatherUtil.toFloat((float) headMap.get("la2"));
endLon = WeatherUtil.toFloat((float) headMap.get("lo2"));
if(endLat < startLat ){
x=-x;
}
if(endLon < startLon){
y = -y;
}
//获取第一次间隔时间
if(forecast <= 0){
forecast = (int)headMap.get("forecastTime");
}
//获取起始检测时间
Object refTime = headMap.get("refTime");
//获取数据类型
int number = (int)headMap.get("parameterNumber");
//计算经纬度个栅状格点X和Y
int lonCount = Math.abs(WeatherUtil.scale(WeatherUtil.sub(endLon, startLon)/Math.abs(y), 0).intValue());
int latCount = Math.abs(WeatherUtil.scale(WeatherUtil.sub(endLat, startLat)/Math.abs(x), 0).intValue());
//遍历格点数据
tempLat = startLat;
int count = 0;
for (int j = 0; j <= latCount; j++) {
tempLon = startLon;
for (int j2 = 0; j2 <= lonCount; j2++) {
tempLon = WeatherUtil.format(tempLon);
tempLat = WeatherUtil.format(tempLat);
String key = tempLon+","+tempLat;
String mongoKey = getByKey(number);
Document doc = map.get(key);
boolean flg = false;
if(doc != null ){//判断数据池里是否存在相同坐标点
if(doc.get("v") == null && doc.get("u") == null){
flg = true;
}else{
List attrDoc = doc.get("v",List.class);
if (attrDoc == null){
attrDoc = doc.get("u",List.class);
}
List list = doc.get(mongoKey, List.class);
if(list == null){
list = new ArrayList<Float>();
}
list.add(data[count]);
doc.put(mongoKey, list);
}
}else{
flg = true;
}
if(flg){//没有数据则添加数据有就修改
doc = new Document();
Map<String, Object> documentMap = new HashMap<String, Object>();
documentMap.put("type", "Point");
List<Float> list = new ArrayList<Float>();
list.add(tempLon);
list.add(tempLat);
documentMap.put("coordinates",list);
doc.append("loc",documentMap);
doc.append("locStr",tempLon+","+tempLat);
doc.append("forecastTime",forecast).append("refTime", refTime);
doc.append("saveTime", sdf.format(now));
List<Float> dataList = new ArrayList<Float>();
dataList.add(data[count]);
doc.put(mongoKey,dataList);
map.put(key, doc);
}
tempLon += y;
count++;
}
tempLat +=x;
}
}
String dataNmae = table+"-"+forecast;
gribLog.info("{}存储数据中....",dataNmae);
mongoClient.insertManyOfUpdate(dataNmae, map, "locStr");
gribLog.info("读取{}文件 共计{}条数据保存 完毕!",options.getFile().getName(),map.size());
}
public String getByKey(int key){
switch (key) {
case 0:
return "tmp";//温度
case 1:
return "tc";//总云量
case 2:
return "u";//风速
case 3:
return "v";//风向
case 4:
return "maxt";//最大温度
case 5:
return "mint";//最小温度
case 11:
return "other";
case 8:
return "tr";//总降雨量
case 19:
return "pt"; //降雨类型
default:
return "unkown";
}
}
public void read(String table,String topic,KafkaClient kafka,String type) throws IOException {
RandomAccessFile raf = new RandomAccessFile(file.getPath(), "r");
raf.order(RandomAccessFile.BIG_ENDIAN);
Grib2Input input = new Grib2Input(raf);
/** 对于这个方法如果文件为grib2 的话执行此方法不必 除以2
* 但是 执行这个方法后会去扫描一次这个文件 所以再scan 会阻塞一段时间执行
* 不执行的话 判断是否是grib2 是则可以再record 除以2
* */
// input.getEdition();
input.scan(false, false);
if(type.equals("EDA10") ){
readEDA10(raf, input, option,table,topic,kafka);
}else{
readOther(raf, input, option,table,topic,kafka);
}
}
}
package com.kedalo.databus.grib2;
import javax.json.JsonNumber;
import java.math.BigDecimal;
import java.math.BigInteger;
/**
* 2014-01-17
* @author liyulong
*/
final class FloatValue implements JsonNumber {
private final float value;
private BigDecimal bd;
FloatValue(float value) {
this.value = value;
}
@Override
public ValueType getValueType() {
return ValueType.NUMBER;
}
@Override
public String toString() {
if (Float.isNaN(value)) {
return "\"NaN\"";
}
else if (value == Float.POSITIVE_INFINITY) {
return "\"-Infinity\"";
}
else if (value == Float.NEGATIVE_INFINITY) {
return "\"Infinity\"";
}
else {
return Float.toString(value);
}
}
@Override
public boolean isIntegral() {
return bigDecimalValue().scale() == 0;
}
@Override
public int intValue() {
return (int)value;
}
@Override
public int intValueExact() {
return bigDecimalValue().intValueExact();
}
@Override
public long longValue() {
return (long)value;
}
@Override
public long longValueExact() {
return bigDecimalValue().longValueExact();
}
@Override
public BigInteger bigIntegerValue() {
return bigDecimalValue().toBigInteger();
}
@Override
public BigInteger bigIntegerValueExact() {
return bigDecimalValue().toBigIntegerExact();
}
@Override
public double doubleValue() {
return (double)value;
}
@Override
public BigDecimal bigDecimalValue() {
return bd != null ? bd : (bd = new BigDecimal(value));
}
@Override
public boolean equals(Object that) {
return that instanceof JsonNumber && this.bigDecimalValue().equals(((JsonNumber)that).bigDecimalValue());
}
@Override
public int hashCode() {
return bigDecimalValue().hashCode();
}
}
package com.kedalo.databus.grib2;
import static ucar.grib.GribNumbers.BIT_5;
import static ucar.grib.GribNumbers.UNDEFINED;
import static ucar.grib.GribNumbers.isBitSet;
import static ucar.grib.grib2.Grib2Tables.codeTable3_1;
import static ucar.grib.grib2.Grib2Tables.codeTable3_2;
import static ucar.grib.grib2.Grib2Tables.codeTable4_0;
import static ucar.grib.grib2.Grib2Tables.codeTable4_3;
import static ucar.grib.grib2.Grib2Tables.codeTable4_5;
import static ucar.grib.grib2.ParameterTable.getCategoryName;
import static ucar.grib.grib2.ParameterTable.getParameterName;
import static ucar.grib.grib2.ParameterTable.getParameterUnit;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import com.kedalo.databus.bean.GribWeather;
import com.kedalo.databus.util.WeatherUtil;
import ucar.grib.grib2.Grib2Data;
import ucar.grib.grib2.Grib2GDSVariables;
import ucar.grib.grib2.Grib2IdentificationSection;
import ucar.grib.grib2.Grib2IndicatorSection;
import ucar.grib.grib2.Grib2Pds;
import ucar.grib.grib2.Grib2Record;
/**
* @author liyulong
* @date 2019-09-17
* grib 保存
* */
public class GribRecord {
private final Grib2Record record;
private final Grib2IndicatorSection ins;
private final Options options;
private final Grib2IdentificationSection ids;
private final Grib2Pds pds;
private final Grib2GDSVariables gds;
public GribRecord(Grib2Record record, Options options) {
this.record = record;
this.options = options;
this.ins = record.getIs();
this.ids = record.getId();
this.pds = record.getPDS().getPdsVars();
this.gds = record.getGDS().getGdsVars();
}
public Map<String,Object> getHead(){
int productDef = pds.getProductDefinitionTemplate();
int discipline = ins.getDiscipline();
int paramCategory = pds.getParameterCategory();
int paramNumber = pds.getParameterNumber();
int gridTemplate = gds.getGdtn();
Map<String,Object> recordData = new HashMap<String,Object>();
//学科
recordData.put("discipline", ins.getDiscipline());
recordData.put("disciplineName", ins.getDisciplineName());
//Grib
recordData.put("gribEdition", ins.getGribEdition());
//中心点 固定值38
recordData.put("center", ids.getCenter_id());
recordData.put("centerName",WeatherUtil.codeToStr(ids.getCenter_id()));
//子中心点
recordData.put("subcenter", ids.getSubcenter_id());
recordData.put("timeLong", ids.getRefTime());
recordData.put("refTime", new Date(ids.getRefTime()));
// recordData.put("refTime", new DateTime(ids.getRefTime()).toDate());
recordData.put("time", WeatherUtil.addHourTime(new DateTime(ids.getRefTime()).toDate(), pds.getForecastTime()));
recordData.put("significanceOfRT", ids.getSignificanceOfRT());
recordData.put("significanceOfRTName", ids.getSignificanceOfRTName());
recordData.put("productStatus", ids.getProductStatus());
recordData.put("productStatusName", ids.getProductStatusName());
recordData.put("productType", ids.getProductType());
recordData.put("productTypeName", ids.getProductStatusName());
recordData.put("productDefinitionTemplate", productDef);
recordData.put("productDefinitionTemplateName", codeTable4_0(productDef));
recordData.put("parameterCategory", paramCategory);
recordData.put("parameterCategoryName",getCategoryName(discipline, paramCategory));
recordData.put("parameterNumber", paramNumber);
recordData.put("parameterNumberName",getParameterName(discipline, paramCategory, paramNumber));
recordData.put("parameterUnit", getParameterUnit(discipline, paramCategory, paramNumber));
recordData.put("genProcessType", pds.getGenProcessType());
recordData.put("genProcessTypeName", codeTable4_3(pds.getGenProcessType()));
recordData.put("forecastTime", pds.getForecastTime());
recordData.put("surface1Type", pds.getLevelType1());
recordData.put("surface1TypeName", codeTable4_5(pds.getLevelType1()));
recordData.put("surface1Value", pds.getLevelValue1());
recordData.put("surface2Type", pds.getLevelType2());
recordData.put("surface2TypeName",codeTable4_5(pds.getLevelType2()));
recordData.put("surface2Value", pds.getLevelValue2());
recordData.put("gridDefinitionTemplate", gridTemplate);
recordData.put("gridDefinitionTemplateName",codeTable3_1(gridTemplate));
recordData.put("numberPoints", gds.getNumberPoints());
switch (gridTemplate) {
case 0: // Template 3.0
case 1: // Template 3.1
case 2: // Template 3.2
case 3: // Template 3.3
writeLonLatGrid(recordData);
break;
case 10: // Template 3.10
writeMercatorGrid(recordData);
break;
case 20: // Template 3.20
writePolarStereographicGrid(recordData);
break;
case 30: // Template 3.30
writeLambertConformalGrid(recordData);
break;
case 40: // Template 3.40
case 41: // Template 3.41
case 42: // Template 3.42
case 43: // Template 3.43
writeLonLatGrid(recordData);
break;
case 90: // Template 3.90
writeSpaceOrOrthographicGrid(recordData);
break;
case 204: // Template 3.204
writeCurvilinearGrid(recordData);
break;
}
return recordData;
}
private void writeCurvilinearGrid(Map<String,Object> recordData) {
writeGridShape(recordData);
writeGridSize(recordData);
}
private void writeSpaceOrOrthographicGrid(Map<String,Object> recordData) {
writeGridShape(recordData);
writeGridSize(recordData);
writeAngle(recordData);
writeLonLatBounds(recordData);
recordData.put("lop", gds.getLop());
recordData.put("lap", gds.getLap());
recordData.put("xp", gds.getXp());
recordData.put("yp", gds.getYp());
recordData.put("nr", gds.getNr());
recordData.put("xo", gds.getXo());
recordData.put("yo", gds.getYo());
}
private void writeGridShape(Map<String,Object> recordData) {
recordData.put("shape",codeTable3_2(gds.getShape()));
recordData.put("shapeName", codeTable3_2(gds.getShape()));
switch (gds.getShape()) {
case 1:
recordData.put("earthRadius", gds.getEarthRadius());
break;
case 3:
recordData.put("majorAxis", gds.getMajorAxis());
recordData.put("minorAxis", gds.getMinorAxis());
break;
}
}
private void writeGridSize(Map<String,Object> recordData) {
recordData.put("gridUnits", gds.getGridUnits());
recordData.put("resolution", gds.getResolution());
recordData.put("winds", isBitSet(gds.getResolution(), BIT_5) ? "relative" : "true");
recordData.put("scanMode", gds.getScanMode());
recordData.put("nx", gds.getNx());
recordData.put("ny", gds.getNy());
}
private void writeAngle(Map<String,Object> recordData) {
recordData.put("angle", gds.getAngle());
recordData.put("basicAngle", gds.getBasicAngle());
recordData.put("subDivisions", gds.getSubDivisions());
}
private void putIfSet(Map<String,Object> recordData,String key,float value){
if ( value != UNDEFINED) {
recordData.put(key, value);
}
}
private void writeLonLatBounds(Map<String,Object> recordData) {
putIfSet(recordData,"lo1",gds.getLo1());
putIfSet(recordData,"la1",gds.getLa1());
putIfSet(recordData,"lo2",gds.getLo2());
putIfSet(recordData,"la2",gds.getLa2());
putIfSet(recordData,"dx",gds.getDx());
putIfSet(recordData,"dy",gds.getDy());
}
private void writeRotationAndStretch(Map<String,Object> recordData) {
putIfSet(recordData,"spLon", gds.getSpLon());
putIfSet(recordData,"spLat", gds.getSpLat());
putIfSet(recordData,"rotationAngle", gds.getRotationAngle());
putIfSet(recordData,"poleLon", gds.getPoleLon());
putIfSet(recordData,"poleLat", gds.getPoleLat());
putIfSet(recordData,"stretchingFactor", gds.getStretchingFactor());
}
private void writeLonLatGrid(Map<String,Object> recordData) {
writeGridShape(recordData);
writeGridSize(recordData);
writeAngle(recordData);
writeLonLatBounds(recordData);
writeRotationAndStretch(recordData);
putIfSet(recordData,"np", gds.getNp());
}
private void writeMercatorGrid(Map<String,Object> recordData) {
writeGridShape(recordData);
writeGridSize(recordData);
writeAngle(recordData);
writeLonLatBounds(recordData);
}
private void writePolarStereographicGrid(Map<String,Object> recordData) {
writeGridShape(recordData);
writeGridSize(recordData);
writeLonLatBounds(recordData);
}
private void writeLambertConformalGrid(Map<String,Object> recordData) {
writeGridShape(recordData);
writeGridSize(recordData);
writeLonLatBounds(recordData);
writeRotationAndStretch(recordData);
recordData.put("laD", gds.getLaD());
recordData.put("loV", gds.getLoV());
recordData.put("projectionFlag", gds.getProjectionFlag());
recordData.put("latin1", gds.getLatin1());
recordData.put("latin2", gds.getLatin2());
}
List<FloatValue> readData(Grib2Data gd) throws IOException {
List<FloatValue> list = new ArrayList<FloatValue>();
float[] data = gd.getData(record.getGdsOffset(), record.getPdsOffset(), ids.getRefTime());
if (data != null) {
for (float value : data) {
list.add(new FloatValue(value));
}
}
return list;
}
}
package com.kedalo.databus.grib2;
import com.lexicalscope.jewel.cli.*;
import java.io.File;
/**
* @date 2019-09-17
*
*
* @author liyulong
* 该功能实现一部分 后续可扩展
*/
@CommandLineInterface(application="grib2json", order=OptionOrder.LONGNAME)
public interface Options {
@Option(longName="help", shortName="h", description="帮助命令")
boolean getShowHelp();
@Option(longName="names", shortName="n", description="打印返回字段名称")
boolean getPrintNames();
@Option(longName="data", shortName="d", description="打印数据字段")
boolean getPrintData();
@Option(longName="compact", shortName="c", description="转化为JSON格式")
boolean isCompactFormat();
@Option(longName="verbose", shortName="v", description="启动日志记录")
boolean getEnableLogging();
@Option(
longName="output",
shortName="o",
description="输出命令 例:-o 文件路径",
defaultToNull=true)
File getOutput();
@Unparsed(name="FILE", defaultToNull=true)
File getFile();
@Option(
longName={"filter.discipline", "fd"},
description="行业过滤起 暂时无用",
defaultToNull=true)
Integer getFilterDiscipline();
@Option(
longName={"filter.category", "fc"},
description="类型过滤器 暂时无用",
defaultToNull=true)
Integer getFilterCategory();
@Option(
longName={"filter.parameter", "fp"},
description="参数过滤器 暂时无用",
defaultToNull=true)
String getFilterParameter();
@Option(
longName={"filter.surface", "fs"},
description="surface 字段暂时无用",
defaultToNull=true)
Integer getFilterSurface();
@Option(
longName={"filter.value", "fv"},
description="过滤 surface 内容 暂时无用",
defaultToNull=true)
Double getFilterValue();
}
推荐阅读
-
Java 类加载器的作用 - 简介:类加载器是 Java™ 中一个非常重要的概念。类加载器负责将 Java 类的字节码加载到 Java 虚拟机中。本文首先详细介绍了 Java 类加载器的基本概念,包括代理模型、加载类的具体过程和线程上下文类加载器等。然后介绍了如何开发自己的类加载器,最后介绍了类加载器在 Web 容器和 OSGi™ 中的应用。 类加载器是 Java 语言的一项创新,也是 Java 语言广受欢迎的重要原因之一。它允许将 Java 类动态加载到 Java 虚拟机中并执行。类加载器从 JDK 1.0 开始出现,最初是为了满足 Java Applets 的需求而开发的,Java Applets 需要从远程位置下载 Java 类文件并在浏览器中执行。现在,类加载器已广泛应用于网络容器和 OSGi。一般来说,Java 应用程序的开发人员不需要直接与类加载器交互;Java 虚拟机的默认行为足以应对大多数情况。但是,如果遇到需要与类加载器交互的情况,而您又不太了解类加载器的机制,就很容易花费大量时间调试异常,如 ClassNotFoundException 和 NoClassDefFoundError。本文将详细介绍 Java 的类加载器,帮助读者深入理解 Java 语言中的这一重要概念。下面先介绍一些基本概念。 类加载器的基本概念 顾名思义,类加载器用于将 Java 类加载到 Java 虚拟机中。一般来说,Java 虚拟机以如下方式使用 Java 类:Java 源程序(.java 文件)经 Java 编译器编译后转换为 Java 字节代码(.class 文件)。类加载器负责读取 Java 字节代码并将其转换为 java.lang 实例。每个实例都用来表示一个 Java 类。通过该实例的 newInstance 方法创建该类的对象。实际情况可能更加复杂,例如,Java 字节代码可能是由工具动态生成或通过网络下载的。 基本上,所有类加载器都是 java.lang.ClassLoader 类的实例。下面将详细介绍这个 Java 类。 java.lang.ClassLoader 类简介 java.lang.ClassLoader 类的基本职责是根据给定类的名称为其查找或生成相应的字节码,然后根据这些字节码定义一个 Java 类,即 java.lang.Class 类的实例。除此之外,ClassLoader 还负责加载 Java 应用程序所需的资源,如图像文件和配置文件。不过,本文只讨论它加载类的功能。为了履行加载类的职责,ClassLoader 提供了许多方法,其中比较重要的方法如表 1 所示。下文将详细介绍这些方法。 表 1.与加载类相关的 ClassLoader 方法
-
玩转Kotlin性能测试:JMH入门指南一 - 测试基础" "深入理解JMH在Kotlin中的应用:基准测试实战解析" "轻松实践Kotlin基准测试:JMH工具详解与实例总结
-
深入理解Spring事务的传播机制:实例解析
-
深入理解Linux指令 | tar命令实操指南:打包与解压文件的艺术,广泛应用于备份与文件压缩——第二部分,操作技巧解析
-
深入解析 Kickstart 配置文件的各项参数详细指南
-
深度学习中的不确定性量化:2020年实用技术与应用大解析 - 61页精华解读" 这份报告深入剖析了近年来深度学习领域中不确定性量化(UQ)技术的最新发展,包括其在强化学习(RL)中的运用实例。探讨了贝叶斯近似和集成学习等主流UQ方法在各个具体场景中的广泛应用,比如自动驾驶、目标识别、图像修复、医疗影像分析(如分类和分割)、文本理解(如文本分类和风险评估)、以及生物信息学等多个领域。 报告进一步梳理了UQ方法在深度学习领域的关键应用案例,并针对当前面临的挑战及未来研究方向进行了概览和展望,为这一领域的研究人员和实践者提供了有价值的参考指南。
-
理解与实战:Java SSH库JSch - 用途解析、四大认证法、无密码登录设置、SSH公钥验证深入讲解、三种选择方案、SFTP文件传输详解、Maven集成及实用代码实例 - 专讲JSch在SFTP文件传输中的应用
-
详解泛娱乐技术服务报告——深入剖析各类娱乐形态下的架构与实例——具体解析3.2 游戏领域的泛娱乐服务技术(12)
-
深入理解常见的单例模式 - 饿汉式实例解析
-
深入解析mysql-bin.000001文件的由来和应对策略