Commit 14a8a379 authored by George Tziavas

MetricoService problem with Autowired (L24) and it does not instantiate PrometheusQueries

parent e569205a
2 merge requests: !5 MR for Release 2024Q4, !1 Creating first version of metrico
Pipeline #10236 failed
Showing changes with 209 additions and 48 deletions
@@ -32,20 +32,20 @@ public class MetricoController {
return new ResponseEntity<>("Application is running", HttpStatus.OK);
}
@GetMapping("/queryPrometheus")
public ResponseEntity<String> queryPrometheus(
@RequestParam(defaultValue = "https") String protocol,
@RequestParam(required = false) String prom_ip,
@RequestParam(defaultValue = "9000") String prom_port,
@RequestParam String query
) {
if (prom_ip == null) {
return new ResponseEntity<>("prom_ip parameter is missing", HttpStatus.BAD_REQUEST);
}
String prom_url = protocol + "://" + prom_ip + ":" + prom_port;
String[] prometheusData = PrometheusQueries.sendQueryToPrometheus(prom_url, query).split("\n");
return new ResponseEntity<>(Arrays.toString(prometheusData), HttpStatus.OK);
}
// @GetMapping("/queryPrometheus")
// public ResponseEntity<String> queryPrometheus(
// @RequestParam(defaultValue = "https") String protocol,
// @RequestParam(required = false) String prom_ip,
// @RequestParam(defaultValue = "9000") String prom_port,
// @RequestParam String query
// ) {
// if (prom_ip == null) {
// return new ResponseEntity<>("prom_ip parameter is missing", HttpStatus.BAD_REQUEST);
// }
// String prom_url = protocol + "://" + prom_ip + ":" + prom_port;
// String[] prometheusData = PrometheusQueries.sendQueryToPrometheus(prom_url, query).split("\n");
// return new ResponseEntity<>(Arrays.toString(prometheusData), HttpStatus.OK);
// }
@PostMapping("/startPeriodicQuery")
public ResponseEntity<String> startPeriodicQuery(@RequestBody PeriodicQueryRequest request) {
@@ -68,7 +68,7 @@ public class MetricoController {
}
String prom_url = request.getProtocol() + "://" + request.getProm_ip() + ":" + request.getProm_port();
Job newPeriodicQuery = prometheusQueries.startPeriodicQuery(prom_url, request.getQuery(), request.getStartDateTime(), request.getEndDateTime(), request.getExecutionInterval());
if(newPeriodicQuery.getState()== ExecutionStateType.FAILED){
if(newPeriodicQuery.getState() == ExecutionStateType.FAILED){
return new ResponseEntity<>("Periodic query failed to start due to internal error.", HttpStatus.INTERNAL_SERVER_ERROR);
}
return new ResponseEntity<>("Periodic query started, with ID: " + newPeriodicQuery.getUuid(), HttpStatus.OK);
......
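The request body consumed by /startPeriodicQuery is a PeriodicQueryRequest, whose definition is not part of this diff. Judging from the getters called in the hunk above, it likely looks roughly like the following sketch; the field names and types here are inferred and the actual DTO in the repository may differ.

// Hypothetical reconstruction of PeriodicQueryRequest, based only on the
// getters used in startPeriodicQuery() above; not the actual class.
public class PeriodicQueryRequest {
    private String protocol;                         // e.g. "http" or "https"
    private String prom_ip;                          // Prometheus host or IP address
    private String prom_port;                        // e.g. "9090"
    private String query;                            // PromQL expression to run
    private java.time.OffsetDateTime startDateTime;  // when periodic execution begins
    private java.time.OffsetDateTime endDateTime;    // when periodic execution stops
    private Integer executionInterval;               // seconds between executions
    // getters and setters (getProtocol(), getProm_ip(), ...) omitted for brevity
}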
@@ -17,7 +17,8 @@ import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
"org.etsi.osl.metrico.repo",
"org.etsi.osl.metrico.model",
"org.etsi.osl.metrico",
"org.etsi.osl.metrico.reposervices"
"org.etsi.osl.metrico.reposervices",
"org.etsi.osl.metrico.services"
})
public class MetricoSpringBoot implements CommandLineRunner {
......
@@ -11,10 +11,24 @@ import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* The {@code JobMapper} class provides methods to map between different job representations.
*/
public class JobMapper {
private static final Logger logger = LoggerFactory.getLogger(JobMapper.class);
/**
* Maps a {@link MeasurementCollectionJob} to a {@link Job}.
*
* @param measurementCollectionJob the measurement collection job to map
* @return the mapped job
* @throws IllegalArgumentException if the data access endpoint is not exactly one,
* if the API type is not supported or not defined,
* if the data access endpoint URI is null,
* or if there is not exactly one query
*/
public static Job measurementCollectionJobMapToJob(MeasurementCollectionJob measurementCollectionJob) {
Job job = new Job();
@@ -41,26 +55,24 @@ public class JobMapper {
}
}
if(measurementCollectionJob.getJobCollectionFilter().getMappings().size() == 1){
DataFilterMap query = measurementCollectionJob.getJobCollectionFilter();
// String stringQuery = measurementCollectionJob.getJobCollectionFilter().getMappings().get(0).getFilterTemplate().getDescription();
job.setQuery(query);
} else {
throw new IllegalArgumentException("There should be exactly one query");
if (measurementCollectionJob.getScheduleDefinition().size() == 1) {
job.setStartDateTime(measurementCollectionJob.getScheduleDefinition().get(0).getScheduleDefinitionStartTime());
job.setEndDateTime(measurementCollectionJob.getScheduleDefinition().get(0).getScheduleDefinitionEndTime());
}
if (measurementCollectionJob.getGranularity() != null){
Granularity granularity = measurementCollectionJob.getGranularity();
job.setExecutionInterval(convertGranularityToSeconds(granularity.getValue()));
}
job.setStartDateTime(measurementCollectionJob.getScheduleDefinition().get(0).getScheduleDefinitionStartTime());
job.setEndDateTime(measurementCollectionJob.getScheduleDefinition().get(0).getScheduleDefinitionEndTime());
@Valid Granularity granularity = measurementCollectionJob.getGranularity();
job.setExecutionInterval(convertGranularityToSeconds(measurementCollectionJob.getGranularity().getValue()));
logger.atDebug().setMessage("Received MeasurementCollectionJob:\n" + measurementCollectionJob + "\nConverted it to Job:\n" + job).log();
return job;
}
public static int convertGranularityToSeconds(String value) {
Pattern PATTERN = Pattern.compile("G_(\\d+)(SEC|MN|H|D|M|Y)");
Pattern PATTERN = Pattern.compile("G_(\\d+)(SEC|MN|H|D|M|Y)", Pattern.CASE_INSENSITIVE);
if (Granularity.contains(value)) {
Matcher matcher = PATTERN.matcher(value);
if (matcher.matches()) {
@@ -69,7 +81,7 @@ public class JobMapper {
if(value.equalsIgnoreCase(Granularity.NA.getValue())){
return Integer.parseInt(null);
}
return switch (unit) {
return switch (unit.toUpperCase()) {
case "SEC" -> amount;
case "MIN" -> amount * 60;
case "H" -> amount * 3600;
......
@@ -47,8 +47,6 @@ public class Job{
private URI dataAccessEndPointUri;
private DataFilterMap query;
private String apiType;
@JsonIgnore
......
@@ -7,7 +7,6 @@ import org.etsi.osl.tmf.pm628.model.ExecutionStateType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.util.UriComponentsBuilder;
@@ -29,17 +28,17 @@ public class PrometheusQueries {
this.jobService = jobService;
}
public static String sendQueryToPrometheus(String prometheusUrl, String query){
public String sendQueryToPrometheus(String prometheusUrl, String query){
RestTemplate restTemplate = new RestTemplate();
UriComponentsBuilder builder = UriComponentsBuilder.fromHttpUrl(prometheusUrl)
.path("/api/v1/query")
.queryParam("query", query);
logger.atInfo().log("Prometheus URL: " + prometheusUrl + " Query: " + query);
logger.atInfo().log("Sent query at prometheus with URL: " + prometheusUrl + "with query: " + query);
ResponseEntity<String> response = restTemplate.getForEntity(builder.toUriString(), String.class);
return response.getBody();
logger.atDebug().log("Received " + response.getBody());
return response.getBody(); // This will be sent to the MQ
}
public Job startPeriodicQuery(String prometheusUrl, String query, OffsetDateTime startDateTime, OffsetDateTime endDateTime, Integer executionInterval) {
......
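Note that sendQueryToPrometheus changes from a static to an instance method in this hunk, which is why the controller's old /queryPrometheus handler, which called PrometheusQueries.sendQueryToPrometheus(...) statically, is commented out above. A minimal sketch of how a caller can reach the instance method through Spring injection follows; the surrounding class and method names are illustrative, only sendQueryToPrometheus(prometheusUrl, query) comes from the diff.

// Sketch: obtain the PrometheusQueries bean via constructor injection and call
// the now non-static method on it.
@Service
public class PrometheusQueryCaller {

    private final PrometheusQueries prometheusQueries;

    public PrometheusQueryCaller(PrometheusQueries prometheusQueries) {
        this.prometheusQueries = prometheusQueries;
    }

    public String[] runQuery(String prometheusUrl, String query) {
        // Each line of the Prometheus response body becomes one array entry.
        return prometheusQueries.sendQueryToPrometheus(prometheusUrl, query).split("\n");
    }
}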
@@ -2,10 +2,19 @@ package org.etsi.osl.metrico.repo;
import org.etsi.osl.metrico.model.Job;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@Repository
public interface JobRepository extends JpaRepository<Job, UUID> {
@Query("SELECT j FROM Job j WHERE j.deleted = false")
List<Job> findAllActiveJobs();
@Query("SELECT j FROM Job j WHERE j.uuid = :id AND j.deleted = false")
Optional<Job> findActiveById(@Param("id") UUID id);
}
\ No newline at end of file
@@ -16,6 +16,9 @@ public class JobRepoService {
private static final Logger logger = LoggerFactory.getLogger(JobRepoService.class);
private void logJobNotFoundError(UUID jobId) {
logger.error("Job with ID {} not found.", jobId);
}
private JobRepository jobRepository;
@Autowired
@@ -38,8 +41,8 @@ public class JobRepoService {
// Retrieve the existing Job
Job job = jobRepository.findById(jobId).orElse(null);
if (job == null) {
logger.error("Job with ID {} not found.", jobId);
return null; // Or throw an exception
logJobNotFoundError(jobId);
return null;
}
// Update the fields as needed
if (newState != null) job.setState(newState);
@@ -55,12 +58,24 @@ public class JobRepoService {
// Retrieve the existing Job
Job job = jobRepository.findById(jobId).orElse(null);
if (job == null) {
logger.error("Job with ID {} not found.", jobId);
return null; // Or throw an exception
logJobNotFoundError(jobId);
return null;
}
// Update the fields as needed
if (newState != null) job.setState(newState);
// Save the updated Job back to the database
return jobRepository.save(job);
}
public boolean softDeleteJob(UUID jobId) {
Job job = jobRepository.findById(jobId).orElse(null);
if (job == null) {
logJobNotFoundError(jobId);
return false;
}
job.setDeleted(true);
jobRepository.save(job);
return true;
}
}
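The softDeleteJob method added above works together with the findAllActiveJobs and findActiveById queries introduced in JobRepository: a job is only flagged with deleted = true, and the filtered queries then stop returning it. A short usage sketch follows, assuming a caller that already holds both beans; only the method names come from this diff, the rest is illustrative.

// Illustrative flow: soft-delete a job, then confirm it no longer appears in
// the "active" queries. The jobRepoService and jobRepository fields are
// assumed to be injected into the surrounding class.
public void removeJob(UUID jobId) {
    if (jobRepoService.softDeleteJob(jobId)) {                       // sets deleted = true and saves
        List<Job> active = jobRepository.findAllActiveJobs();        // excludes soft-deleted rows
        Optional<Job> lookup = jobRepository.findActiveById(jobId);  // Optional.empty() after the soft delete
        logger.debug("Active jobs: {}, soft-deleted job still visible: {}", active.size(), lookup.isPresent());
    }
}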
package org.etsi.osl.metrico.services;
import jakarta.validation.constraints.NotNull;
import org.etsi.osl.metrico.mapper.JobMapper;
import org.etsi.osl.metrico.model.Job;
import org.etsi.osl.metrico.prometheus.PrometheusQueries;
import org.etsi.osl.tmf.pm628.model.ExecutionStateType;
import org.etsi.osl.tmf.pm628.model.MeasurementCollectionJob;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import java.net.URI;
import java.time.OffsetDateTime;
public class MetricoService {
private static final Logger logger = LoggerFactory.getLogger(JobService.class);
private final PrometheusQueries prometheusQueries;
@Autowired
public MetricoService(PrometheusQueries prometheusQueries) {
this.prometheusQueries = prometheusQueries;
}
public String sendQueryToPrometheus(String param1, String param2) {
return prometheusQueries.sendQueryToPrometheus(param1, param2);
}
public String[] queryToPrometheus(@NotNull MeasurementCollectionJob givenMCJ){
Job job = JobMapper.measurementCollectionJobMapToJob(givenMCJ);
URI uri = job.getDataAccessEndPointUri();
String promURL = job.getDataAccessEndPointUri().getScheme() + "://" + job.getDataAccessEndPointUri().getAuthority();
String promQuery = job.getDataAccessEndPointUri().getQuery();
promQuery = promQuery.replace("query=", "");
return sendQueryToPrometheus(promURL, promQuery).split("\n");
}
public void startPeriodicQueryToPrometheus(@NotNull MeasurementCollectionJob givenMCJ){
Job job = JobMapper.measurementCollectionJobMapToJob(givenMCJ);
String promURL = job.getDataAccessEndPointUri().getScheme() + "://" + job.getDataAccessEndPointUri().getAuthority();
String promQuery = job.getDataAccessEndPointUri().getQuery();
promQuery = promQuery.replace("query=", "");
if (job.getStartDateTime() == null){
job.setStartDateTime(OffsetDateTime.now());
}
if (job.getEndDateTime() == null){
job.setEndDateTime(job.getStartDateTime().plusHours(1));
}
if(job.getExecutionInterval() == null){
job.setExecutionInterval(180);
}
Job newPeriodicQuery = prometheusQueries.startPeriodicQuery(promURL, promQuery,job.getStartDateTime(), job.getEndDateTime(), job.getExecutionInterval());
if(newPeriodicQuery.getState()== ExecutionStateType.FAILED){
logger.atError().setMessage("Periodic query failed to start due to internal error.");
} else {
logger.atError().setMessage("Periodic query started, with ID: " + newPeriodicQuery.getUuid());
}
}
}
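The commit message notes that MetricoService has a problem with Autowired and that PrometheusQueries is not instantiated: the class above lives in org.etsi.osl.metrico.services, the package just added to the scanned package list in MetricoSpringBoot, but it carries no stereotype annotation, so Spring never creates it or injects PrometheusQueries into its constructor. One possible direction for a fix is sketched below; whether this is the fix actually applied later is not shown in this commit.

// Sketch only: registering MetricoService as a Spring bean would let the
// component scan pick it up and satisfy the constructor with the
// PrometheusQueries bean, addressing the wiring problem described in the
// commit message.
@Service
public class MetricoService {

    private final PrometheusQueries prometheusQueries;

    @Autowired // optional on a single constructor in recent Spring versions
    public MetricoService(PrometheusQueries prometheusQueries) {
        this.prometheusQueries = prometheusQueries;
    }
}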
@@ -19,7 +19,7 @@ public class JobMapperTest {
measurementCollectionJob.setUuid("9f22dc98-f439-4fdd-98e3-f6471cf8ca67");
measurementCollectionJob.setDataAccessEndpoint(dataAccessEndpointList);
measurementCollectionJob.set
return measurementCollectionJob;
}
@@ -40,12 +40,5 @@ public class JobMapperTest {
return dataFilterMap;
}
public DataFilterMapItem dataFilterMapItemCreate(){
DataFilterMapItem dataFilterMapItem = new DataFilterMapItem();
dataFilterMapItem.setFilterTemplate();
}
}
package org.etsi.osl.metrico.services;
import org.etsi.osl.metrico.mapper.JobMapper;
import org.etsi.osl.metrico.model.Job;
import org.etsi.osl.metrico.prometheus.PrometheusQueries;
import org.etsi.osl.tmf.pm628.model.DataAccessEndpoint;
import org.etsi.osl.tmf.pm628.model.Granularity;
import org.etsi.osl.tmf.pm628.model.MeasurementCollectionJob;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import java.util.*;
import java.net.URI;
import java.net.URISyntaxException;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
public class MetricoServiceTest {
@Mock
private PrometheusQueries prometheusQueries;
@InjectMocks
private MetricoService metricoService = new MetricoService(prometheusQueries);
private MeasurementCollectionJob mcj;
private Job job;
@BeforeEach
public void setUp() throws URISyntaxException {
mcj = new MeasurementCollectionJob();
mcj.setUuid("123e4567-e89b-12d3-a456-426614174000");
mcj.granularity(Granularity.G_1M);
DataAccessEndpoint dae = new DataAccessEndpoint();
dae.setApiType("PROMETHEUS");
dae.setUri(new URI("http://150.140.195.195:9090/api/v1/query?query=cell_dl_bitrate"));
dae.setUuid("123e4567-e89b-12d3-a456-426614174011");
List<DataAccessEndpoint> daeList = new ArrayList<>();
daeList.add(dae);
mcj.setDataAccessEndpoint(daeList);
//job.setDataAccessEndPointUri(new URI("http://150.140.195.195:9090/api/v1/query?query=up"));
}
@Test
public void testQueryToPrometheus() {
String[] result = metricoService.queryToPrometheus(mcj);
System.out.println(Arrays.toString(result));
//assertEquals("OK", result);
}
@Test
public void testStartPeriodicQueryToPrometheus(){
metricoService.startPeriodicQueryToPrometheus(mcj);
}
}
\ No newline at end of file
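As committed, the test declares @Mock and @InjectMocks but, as shown here, never activates them: there is no @ExtendWith(MockitoExtension.class) on the class, and the field initializer new MetricoService(prometheusQueries) runs before any mock exists, so the service is built with a null dependency. A conventional Mockito/JUnit 5 arrangement would look roughly like the sketch below; the stubbed values and test name are invented.

// Sketch of a standard Mockito setup for this test class.
@ExtendWith(MockitoExtension.class)        // lets Mockito populate the @Mock / @InjectMocks fields
public class MetricoServiceTest {

    @Mock
    private PrometheusQueries prometheusQueries;

    @InjectMocks                           // Mockito constructs MetricoService with the mock above
    private MetricoService metricoService;

    @Test
    public void sendQueryToPrometheusDelegatesToPrometheusQueries() {
        // Stub the Prometheus call so the test does not depend on a live server.
        when(prometheusQueries.sendQueryToPrometheus(any(), any()))
                .thenReturn("cell_dl_bitrate 42.0");
        String body = metricoService.sendQueryToPrometheus("http://prometheus.example:9090", "up");
        assertEquals("cell_dl_bitrate 42.0", body);
    }
}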