Commit 14a8a379 authored by George Tziavas's avatar George Tziavas
Browse files

MetricoService problem with @Autowired (L24): it does not instantiate PrometheusQueries

parent e569205a
Loading
Loading
Loading
Loading
Loading
+15 −15
Original line number Diff line number Diff line
@@ -32,20 +32,20 @@ public class MetricoController {
        return new ResponseEntity<>("Application is running", HttpStatus.OK);
    }

    @GetMapping("/queryPrometheus")
    public ResponseEntity<String> queryPrometheus(
            @RequestParam(defaultValue = "https") String protocol,
            @RequestParam(required = false) String prom_ip,
            @RequestParam(defaultValue = "9000") String prom_port,
            @RequestParam String query
    ) {
        // prom_ip is not enforced by Spring (required = false), so validate it explicitly.
        if (prom_ip == null) {
            return new ResponseEntity<>("prom_ip parameter is missing", HttpStatus.BAD_REQUEST);
        }
        // Assemble the Prometheus base URL from the individual request parameters,
        // e.g. "https://10.0.0.1:9000".
        final String baseUrl = protocol + "://" + prom_ip + ":" + prom_port;
        // Execute the query and split the raw response body into individual lines.
        final String rawResponse = PrometheusQueries.sendQueryToPrometheus(baseUrl, query);
        final String[] responseLines = rawResponse.split("\n");
        return new ResponseEntity<>(Arrays.toString(responseLines), HttpStatus.OK);
    }
//    @GetMapping("/queryPrometheus")
//    public ResponseEntity<String> queryPrometheus(
//            @RequestParam(defaultValue = "https") String protocol,
//            @RequestParam(required = false) String prom_ip,
//            @RequestParam(defaultValue = "9000") String prom_port,
//            @RequestParam String query
//    ) {
//        if (prom_ip == null) {
//            return new ResponseEntity<>("prom_ip parameter is missing", HttpStatus.BAD_REQUEST);
//        }
//        String prom_url = protocol + "://" + prom_ip + ":" + prom_port;
//        String[] prometheusData = PrometheusQueries.sendQueryToPrometheus(prom_url, query).split("\n");
//        return new ResponseEntity<>(Arrays.toString(prometheusData), HttpStatus.OK);
//    }

    @PostMapping("/startPeriodicQuery")
    public ResponseEntity<String> startPeriodicQuery(@RequestBody PeriodicQueryRequest request) {
+2 −1
Original line number Diff line number Diff line
@@ -17,7 +17,8 @@ import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
        "org.etsi.osl.metrico.repo",
        "org.etsi.osl.metrico.model",
        "org.etsi.osl.metrico",
        "org.etsi.osl.metrico.reposervices"
        "org.etsi.osl.metrico.reposervices",
        "org.etsi.osl.metrico.services"
})
        public class MetricoSpringBoot implements CommandLineRunner {

+25 −13
Original line number Diff line number Diff line
@@ -11,10 +11,24 @@ import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * The {@code JobMapper} class provides methods to map between different job representations.
 */
public class JobMapper {

    private static final Logger logger = LoggerFactory.getLogger(JobMapper.class);

    /**
     * Maps a {@link MeasurementCollectionJob} to a {@link Job}.
     *
     * @param measurementCollectionJob the measurement collection job to map
     * @return the mapped job
     * @throws IllegalArgumentException if the data access endpoint is not exactly one,
     *                                  if the API type is not supported or not defined,
     *                                  if the data access endpoint URI is null,
     *                                  or if there is not exactly one query
     */

    public static Job measurementCollectionJobMapToJob(MeasurementCollectionJob measurementCollectionJob) {
        Job job = new Job();

@@ -41,26 +55,24 @@ public class JobMapper {
            }
        }

        if(measurementCollectionJob.getJobCollectionFilter().getMappings().size() == 1){
            DataFilterMap query = measurementCollectionJob.getJobCollectionFilter();
                // String stringQuery = measurementCollectionJob.getJobCollectionFilter().getMappings().get(0).getFilterTemplate().getDescription();
            job.setQuery(query);
        } else {
            throw new IllegalArgumentException("There should be exactly one query");
        }

        if (measurementCollectionJob.getScheduleDefinition().size() == 1) {
            job.setStartDateTime(measurementCollectionJob.getScheduleDefinition().get(0).getScheduleDefinitionStartTime());
            job.setEndDateTime(measurementCollectionJob.getScheduleDefinition().get(0).getScheduleDefinitionEndTime());
        }


        if (measurementCollectionJob.getGranularity() != null){
            Granularity granularity = measurementCollectionJob.getGranularity();
            job.setExecutionInterval(convertGranularityToSeconds(granularity.getValue()));
        }

        @Valid Granularity granularity = measurementCollectionJob.getGranularity();
        job.setExecutionInterval(convertGranularityToSeconds(measurementCollectionJob.getGranularity().getValue()));

        logger.atDebug().setMessage("Received MeasurementCollectionJob:\n" + measurementCollectionJob + "\nConverted it to Job:\n" + job).log();
        return job;
    }

    public static int convertGranularityToSeconds(String value) {
        Pattern PATTERN = Pattern.compile("G_(\\d+)(SEC|MN|H|D|M|Y)");
        Pattern PATTERN = Pattern.compile("G_(\\d+)(SEC|MN|H|D|M|Y)", Pattern.CASE_INSENSITIVE);
        if (Granularity.contains(value)) {
            Matcher matcher = PATTERN.matcher(value);
            if (matcher.matches()) {
@@ -69,7 +81,7 @@ public class JobMapper {
                if(value.equalsIgnoreCase(Granularity.NA.getValue())){
                    return Integer.parseInt(null);
                }
                return switch (unit) {
                return switch (unit.toUpperCase()) {
                    case "SEC" -> amount;
                    case "MIN" -> amount * 60;
                    case "H" -> amount * 3600;
+0 −2
Original line number Diff line number Diff line
@@ -47,8 +47,6 @@ public class Job{

    private URI dataAccessEndPointUri;

    private DataFilterMap query;

    private String apiType;

    @JsonIgnore
+4 −5
Original line number Diff line number Diff line
@@ -7,7 +7,6 @@ import org.etsi.osl.tmf.pm628.model.ExecutionStateType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.util.UriComponentsBuilder;
@@ -29,17 +28,17 @@ public class PrometheusQueries {
        this.jobService = jobService;
    }

    public static String sendQueryToPrometheus(String prometheusUrl, String query){
    public String sendQueryToPrometheus(String prometheusUrl, String query){
        RestTemplate restTemplate = new RestTemplate();

        UriComponentsBuilder builder = UriComponentsBuilder.fromHttpUrl(prometheusUrl)
                .path("/api/v1/query")
                .queryParam("query", query);
        logger.atInfo().log("Prometheus URL: " + prometheusUrl + " Query: " + query);
        logger.atInfo().log("Sent query at prometheus with URL: " + prometheusUrl + "with query: " + query);

        ResponseEntity<String> response = restTemplate.getForEntity(builder.toUriString(), String.class);

        return response.getBody();
        logger.atDebug().log("Received " + response.getBody());
        return response.getBody();  // This will be sent to the MQ
    }

    public Job startPeriodicQuery(String prometheusUrl, String query, OffsetDateTime startDateTime, OffsetDateTime endDateTime, Integer executionInterval) {
Loading