Merge branch '11-add-api-for-getting-home-page-data' into 'main'
Resolve "Add API for getting home page data" Closes #11 See merge request padas/24ss-5430-web-and-data-eng/gruppe-3/datadash!7
This commit is contained in:
commit
43d3ff17d5
DatasetController.java (new file):

@@ -0,0 +1,103 @@
package de.uni_passau.fim.PADAS.group3.DataDash.controler;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.web.bind.annotation.*;

import de.uni_passau.fim.PADAS.group3.DataDash.model.Dataset;
import de.uni_passau.fim.PADAS.group3.DataDash.model.DatasetService;
import de.uni_passau.fim.PADAS.group3.DataDash.model.Type;

import org.springframework.data.domain.Pageable;
import org.springframework.data.web.config.EnableSpringDataWebSupport;
import org.springframework.data.domain.Sort;

import java.util.UUID;

import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;

@RestController
@RequestMapping("/api/v1/datasets")
@EnableSpringDataWebSupport
public class DatasetController {

    @Autowired
    private DatasetService datasetService;

    // @GetMapping
    // public List<Dataset> getAllDatasets() {
    //     return datasetService.getAllDatasets();
    // }

    @GetMapping("/id/{id}")
    public Dataset getDatasetById(@PathVariable("id") UUID id) {
        return datasetService.getDatasetById(id);
    }

    @PostMapping
    public Dataset createDataset(@RequestBody Dataset dataset) {
        datasetService.addDataset(dataset);
        // TODO: figure out what the fuck i need to do here
        return null;
    }

    // @PutMapping("/{id}")
    // public Dataset updateDataset(@PathVariable("id") Long id, @RequestBody
    //         Dataset dataset) {
    //     return datasetService.updateDataset(id, dataset);
    // }
    //

    @DeleteMapping("/id/{id}")
    public void deleteDataset(@PathVariable("id") UUID id) {
        datasetService.deleteDataset(id);
    }

    @PostMapping("/id/{id}/upvote")
    public Dataset upvote(@PathVariable("id") UUID id) {
        datasetService.upvoteDataset(id);
        return null;
    }

    @PostMapping("/id/{id}/downvote")
    public Dataset downvote(@PathVariable("id") UUID id) {
        datasetService.downvoteDataset(id);
        return null; // new ResponseEntity<>(null, HttpStatus.OK);
    }

    @PostMapping("/id/{id}/vote")
    public String postMethodName(@PathVariable("id") UUID id,
            @RequestParam("stars") int stars) {
        if (stars > 0 && stars < 6) {
            datasetService.voteDataset(id, stars);
            return null;
        }
        return "Invalid vote";
    }

    @GetMapping
    public Page<Dataset> getDatasetsByDateAfter(@RequestParam(value = "author", required = false) String author,
            @RequestParam(value = "title", required = false) String title,
            @RequestParam(value = "description", required = false) String description,
            @RequestParam(value = "abst", required = false) String abst,
            @RequestParam(value = "type", required = false) Type type,
            @RequestParam(value = "min-raiting", required = false) Float raiting,
            @RequestParam(value = "page", required = false, defaultValue = "0") int page,
            @RequestParam(value = "size", required = false, defaultValue = "20") int size,
            @RequestParam(value = "sort", required = false, defaultValue = "upvotes") String sort,
            @RequestParam(value = "direction", required = false, defaultValue = "desc") String direction) {
        Pageable pageable = PageRequest.of(page, size,
                Sort.by(direction.equals("asc") ? Sort.Direction.ASC : Sort.Direction.DESC, sort));
        return datasetService.getDatasetsByOptionalCriteria(title, description, author, abst, type, raiting, pageable);
    }

    @GetMapping("/search")
    public Page<Dataset> search(@RequestParam(value = "search", required = false, defaultValue = "%") String search,
            @RequestParam(value = "page", required = false, defaultValue = "0") int page,
            @RequestParam(value = "size", required = false, defaultValue = "20") int size,
            @RequestParam(value = "sort", required = false, defaultValue = "upvotes") String sort,
            @RequestParam(value = "direction", required = false, defaultValue = "desc") String direction) {
        Pageable pageable = PageRequest.of(page, size,
                Sort.by(direction.equals("asc") ? Sort.Direction.ASC : Sort.Direction.DESC, sort));
        return datasetService.searchByOptionalCriteria(search, pageable);
    }

}
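For reference, a rough sketch of how these endpoints could be exercised from the browser or any script served from the same origin as the Spring app; the paths and query parameter names mirror the controller above, while the concrete filter values and the placeholder UUID are illustrative only, not part of this merge request:

// Hedged usage sketch, not part of this commit.
// List datasets with the optional filters and paging/sorting parameters
// declared in DatasetController (defaults: page=0, size=20, sort=upvotes, direction=desc).
const listURL = new URL("/api/v1/datasets", location.origin);
listURL.searchParams.append("type", "API");        // optional filter, example value
listURL.searchParams.append("sort", "upvotes");
listURL.searchParams.append("direction", "desc");
fetch(listURL)
    .then(resp => resp.json())
    .then(page => console.log(page.content));       // Spring serializes the Page with a "content" array

// Cast a 1-5 star vote and an upvote for one dataset (placeholder UUID).
const id = "00000000-0000-0000-0000-000000000000";
fetch(`/api/v1/datasets/id/${id}/vote?stars=5`, { method: "POST" });
fetch(`/api/v1/datasets/id/${id}/upvote`, { method: "POST" });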
Dataset.java:

@@ -1,7 +1,9 @@
 package de.uni_passau.fim.PADAS.group3.DataDash.model;
 
-import java.sql.Date;
+import java.net.URL;
+import java.time.LocalDate;
 import java.util.UUID;
 
 import jakarta.persistence.Entity;
 import jakarta.persistence.EnumType;
 import jakarta.persistence.Enumerated;
@@ -27,25 +29,31 @@ public class Dataset {
 
     private String author;
 
-    private Date date;
+    private LocalDate date;
 
     private float raiting;
 
     private int votes;
 
+    private int upvotes;
+
+    private URL url;
+
     private String[] categories;
 
-    public Dataset(String title, String abst, String description, String author, Date date, String[] categories, Type type) {
+    public Dataset(String title, String abst, String description, String author, URL url, String[] categories, Type type) {
 
         this.raiting = 0;
         this.votes = 0;
+        this.upvotes = 0;
         setTitle(title);
         setAbst(abst);
         setDescription(description);
         setAuthor(author);
-        setDate(date);
+        setDate(LocalDate.now());
         setCategories(categories);
         setType(type);
+        setUrl(url);
     }
 
     public Dataset() {
@@ -64,7 +72,7 @@
         return categories;
     }
 
-    public Date getDate() {
+    public LocalDate getDate() {
         return date;
     }
 
@@ -92,6 +100,14 @@
         return votes;
     }
 
+    public int getUpvotes() {
+        return upvotes;
+    }
+
+    public URL getUrl() {
+        return url;
+    }
+
     public void setAbst(String abst) {
         this.abst = abst.substring(0, Math.min(abst.length(), 100));
     }
@@ -104,13 +120,17 @@
         this.categories = categories;
     }
 
-    public void setDate(Date date) {
-        this.date = date;
+    public void setDate(LocalDate localDate) {
+        this.date = localDate;
     }
 
     public void setDescription(String description) {
         this.description = description;
     }
 
+    public void setUrl(URL url) {
+        this.url = url;
+    }
+
     public void setTitle(String title) {
         this.title = title.substring(0, Math.min(title.length(), 50));
@@ -124,4 +144,12 @@
         raiting = (raiting*votes + stars) / (++votes);
     }
 
+    public void upvote() {
+        upvotes++;
+    }
+
+    public void downvote() {
+        upvotes--;
+    }
+
 }
DatasetService.java (new file):

@@ -0,0 +1,102 @@
package de.uni_passau.fim.PADAS.group3.DataDash.model;

import java.util.List;
import java.util.Optional;
import java.util.UUID;

import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import org.springframework.data.domain.Page;


@Service
public class DatasetService {
    private dataRepository datasetRepository;

    public DatasetService(dataRepository datasetRepository) {
        this.datasetRepository = datasetRepository;
    }

    public List<Dataset> getAllDatasets() {
        return datasetRepository.findAll();
    }

    public Dataset getDatasetById(UUID id) {
        return datasetRepository.getDatasetById(id);
    }

    public void addDataset(Dataset dataset) {
        datasetRepository.save(dataset);
    }

    public void updateDatasetTitle(UUID id, String title) {
        datasetRepository.getDatasetById(id).setTitle(title);
    }

    public void voteDataset(UUID id, int vote) {
        Dataset dataset = datasetRepository.getDatasetById(id);
        dataset.vote(vote);
        datasetRepository.save(dataset);
    }

    public void deleteDataset(UUID id) {
        Dataset dataset = datasetRepository.getDatasetById(id);
        datasetRepository.delete(dataset);
    }

    public List<Dataset> getDatasetsByTitle(String title) {
        return datasetRepository.findByTitle(title);
    }

    public List<Dataset> getDatasetsByTitleLike(String title) {
        return datasetRepository.findByTitleLike(title);
    }

    public List<Dataset> findByDescriptionLike(String description) {
        return datasetRepository.findByDescriptionLike(description);
    }

    public List<Dataset> getDatasetsByAuthorLike(String author) {
        return datasetRepository.findByAuthorLike(author);
    }

    public List<Dataset> getDatasetsByType(Type type) {
        return datasetRepository.findByType(type);
    }

    public List<Dataset> getDatasetsByAbstLike(String abst) {
        return datasetRepository.findByAbstLike(abst);
    }

    public List<Dataset> getDatasetsByRaitingGreaterThan(float raiting) {
        return datasetRepository.findByRaitingGreaterThan(raiting);
    }

    public void upvoteDataset(UUID id) {
        Dataset dataset = datasetRepository.getDatasetById(id);
        dataset.upvote();
        datasetRepository.save(dataset);
    }

    public void downvoteDataset(UUID id) {
        Dataset dataset = datasetRepository.getDatasetById(id);
        dataset.downvote();
        datasetRepository.save(dataset);
    }

    public Page<Dataset> getDatasetsByOptionalCriteria(String title, String description, String author, String abst,
            Type type, Float raiting, Pageable pageable) {
        String[] categories = null;
        return datasetRepository.findByOptionalCriteria(Optional.ofNullable(title), Optional.ofNullable(description),
                Optional.ofNullable(author), Optional.ofNullable(abst), Optional.ofNullable(type),
                Optional.ofNullable(categories), Optional.ofNullable(raiting), pageable);
    }

    public Page<Dataset> searchByOptionalCriteria(String search, Pageable pageable) {
        if (search.equals("%")) {
            System.out.println("searching for all datasets");
            return datasetRepository.findAll(pageable);
        }
        return datasetRepository.searchByOptionalCriteria(Optional.ofNullable(search), pageable);
    }
}
LoadDummyDatabase.java:

@@ -1,5 +1,6 @@
 package de.uni_passau.fim.PADAS.group3.DataDash.model;
 
+import java.net.URL;
 import java.sql.Date;
 import java.util.List;
 
@@ -22,7 +23,7 @@ public class LoadDummyDatabase {
 
         return args -> {
             for (int i = 0; i < 100; i++) {
-                Dataset dataset = new Dataset("Title" + i, "Abst" + i, "Description" + i, "Author" + i, new Date(0), new String[]{"Category" + i}, Type.API);
+                Dataset dataset = new Dataset("Title" + i, "Abst" + i, "Description" + i, "Author" + i, null, new String[]{"Category" + i}, Type.API);
                 repository.save(dataset);
                 log.info("Preloading" + repository.save(dataset));
             }
dataRepository.java:

@@ -1,12 +1,20 @@
 package de.uni_passau.fim.PADAS.group3.DataDash.model;
 
 import java.util.List;
+import java.util.Optional;
 import java.util.UUID;
 import java.sql.Date;
 
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;
 import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
 
 
 public interface dataRepository extends JpaRepository<Dataset, UUID>{
 
+    Dataset getDatasetById(UUID id);
     List<Dataset> findByTitle(String title);
     List<Dataset> findByTitleLike(String title);
     List<Dataset> findByAuthorLike(String author);
@@ -20,5 +28,32 @@ public interface dataRepository extends JpaRepository<Dataset, UUID>{
     List<Dataset> findByDateAfter(Date date);
     List<Dataset> findByDateBefore(Date date);
     List<Dataset> findByDateBetween(Date date1, Date date2);
+
+    @SuppressWarnings("null")
+    Page<Dataset> findAll(Pageable pageable);
+
+    @Query("SELECT d FROM Dataset d WHERE " +
+            "(COALESCE(:title, '') = '' OR d.title LIKE :title) AND " +
+            "(COALESCE(:description, '') = '' OR d.description LIKE :description) AND" +
+            "(COALESCE(:author, '') = '' OR d.author LIKE :author) AND" +
+            "(COALESCE(:abst, '') = '' OR d.abst LIKE :abst) AND" +
+            "(:type IS NULL OR d.type = :type) AND"+
+            "(:categories IS NULL OR d.categories = :categories) AND" +
+            "(:raiting IS NULL OR d.raiting > :raiting)")
+    Page<Dataset> findByOptionalCriteria(@Param("title") Optional<String> title,
+            @Param("description") Optional<String> description,
+            @Param("author") Optional<String> author,
+            @Param("abst") Optional<String> abst,
+            @Param("type") Optional<Type> type,
+            @Param("categories") Optional<String[]> categories,
+            @Param("raiting") Optional<Float> raiting,
+            Pageable pageable);
+
+    @Query("SELECT d FROM Dataset d WHERE " +
+            "(LOWER(d.title) LIKE LOWER(:search)) OR " +
+            "(LOWER(d.description) LIKE LOWER(:search)) OR " +
+            "(LOWER(d.author) LIKE LOWER(:search))")
+    Page<Dataset> searchByOptionalCriteria(@Param("search") Optional<String> search,
+            Pageable pageable);
+
 }
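Both JPQL queries match with LIKE, so callers are expected to embed SQL-style wildcards in the value itself; the frontend falls back to "%" (match everything) when the search box is empty. A small sketch of that convention against the /search endpoint from the controller, where the term "weather" is just an example value:

// Hedged sketch, not part of this commit: wildcards travel inside the search parameter.
const searchURL = new URL("/api/v1/datasets/search", location.origin);
searchURL.searchParams.append("search", "%weather%"); // case-insensitive substring match on title, description, author
fetch(searchURL)
    .then(resp => resp.json())
    .then(page => console.log(page.content));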
src/main/resources/static/contentUtility.js (new file, 14 lines):

@@ -0,0 +1,14 @@
import { searchBarTimeout } from "./main.js"

export function fetchQuery(fetchString) {
    clearTimeout(searchBarTimeout);
    fetch(fetchString)
        .then(resp => resp.json())
        .then((data) => {
            parseContent(data.content);
        });
}

function parseContent(content) {
    //TODO: method for parsing query results
}
src/main/resources/static/main.js (new file, 127 lines):

@@ -0,0 +1,127 @@
import { fetchQuery } from "./contentUtility.js";

const apiEndpoint = "/api/v1/datasets";
const baseURL = location.origin;
const defaultPagingValue = 20;
const lastQuery = {
    url: "",
    totalPages: 0,
    currentPage: 0
};

// definition of all buttons
const addButton = document.getElementById("add-btn");
const filterButton = document.getElementById("filter-btn");
const searchButton = document.getElementById("search-btn");
const searchBar = document.getElementById("search-entry");
const sortButton = document.getElementById("sort-btn");
const upvoteButtons = document.getElementsByClassName("upvote-btn");
const downvoteButtons = document.getElementsByClassName("downvote-btn");

// ID of the timeout, because we need to cancel it at some point
export let searchBarTimeout;

// Event listeners
addButton.addEventListener("click", () => {
    navigateToAdd();
});

filterButton.addEventListener("change", () => {
    const filterString = filterButton.value;
    filter(filterString);
});

searchButton.addEventListener("click", () => {
    const searchString = searchBar.value;
    search(searchString);
});

searchBar.addEventListener("input", () => {
    clearTimeout(searchBarTimeout);
    searchBarTimeout = setTimeout(() => {
        const searchString = searchBar.value;
        search(searchString);
    }, 1000);
});

searchBar.addEventListener('keypress', function (e) {
    if (e.key === 'Enter') {
        const searchString = searchBar.value;
        search(searchString);
    }
})

sortButton.addEventListener("change", () => {
    const sortString = sortButton.value;
    sort(sortString);
});

const upvoteButtonClickListener = e => {
    const entryID = e.target.parentElement.parentElement.dataset.id;
    vote(entryID, true);
};
for (const upvoteButton of upvoteButtons) {
    upvoteButton.addEventListener("click", upvoteButtonClickListener);
}

const downvoteButtonClickListener = e => {
    const entryID = e.target.parentElement.parentElement.dataset.id;
    vote(entryID, false);
};
for (const downvoteButton of downvoteButtons) {
    downvoteButton.addEventListener("click", downvoteButtonClickListener);
}

// functions of the main page
function navigateToAdd() {
    //TODO: url to add page not yet implemented, add here
}

function filter(filterString) {
    filterString = filterString.toUpperCase();

    let fetchURL = new URL(apiEndpoint, baseURL);
    fetchURL.searchParams.append("type", filterString);
    fetchURL.searchParams.append("size", defaultPagingValue);

    console.log(fetchURL); // TODO: remove
    fetchQuery(fetchURL);
}

function search(searchString) {
    let fetchURL = new URL(apiEndpoint + "/search", baseURL);
    fetchURL.searchParams.append("search", searchString.length == 0 ? "%" : searchString);

    console.log(fetchURL); // TODO: remove
    fetchQuery(fetchURL);
}

function sort(sortString) {
    let query = sortString.toLowerCase().split(" ");
    if (query[1] === "a-z" || query[1] === "↑") {
        query[1] = "asc";
    } else {
        query[1] = "desc";
    }

    let fetchURL = new URL(apiEndpoint, baseURL);
    fetchURL.searchParams.append("sort", query[0]);
    fetchURL.searchParams.append("direction", query[1]);

    console.log(fetchURL); // TODO: remove
    fetchQuery(fetchURL);
}

function vote(entryID, up) {
    const fetchURL = new URL(
        `${apiEndpoint}/id/${entryID}/${up ? "up" : "down"}vote`,
        baseURL,
    );

    console.log(fetchURL); // TODO: remove
    fetch(fetchURL);
}

function incrementPageCount() {
    lastQuery.currentPage++;
}
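lastQuery and incrementPageCount() are declared but not yet wired to anything; they look intended for paging through results. A hedged sketch of how a follow-up page might be requested using the controller's page and size parameters (loadNextPage is a hypothetical helper, not part of this merge request):

// Hypothetical helper, not in this commit: fetch the next page of the last query.
function loadNextPage() {
    if (lastQuery.totalPages !== 0 && lastQuery.currentPage + 1 >= lastQuery.totalPages) {
        return; // nothing left to load
    }
    incrementPageCount();
    const fetchURL = new URL(lastQuery.url || apiEndpoint, baseURL);
    fetchURL.searchParams.set("page", lastQuery.currentPage);
    fetchURL.searchParams.set("size", defaultPagingValue);
    fetchQuery(fetchURL);
}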
Main page HTML:

@@ -5,9 +5,10 @@
     <meta name="viewport" content="width=device-width, initial-scale=1.0">
     <title>DataDash</title>
     <link rel="stylesheet" href="main.css">
+    <script type="module" src="main.js" defer></script>
 </head>
 <body>
-    <div onclick="console.log('add')" id="add-btn" title="Add a new API entry"></div>
+    <div id="add-btn" title="Add a new API entry"></div>
     <main>
         <header>
             <h1>Welcome to DataDash</h1>
@@ -16,17 +17,23 @@
 
         <section id="tool-bar">
             <select id="sort-btn" class="btn flat" title="Sort entries">Sort by
-                <option>Option 1</option>
-                <option>Option 2</option>
+                <option>Author A-Z</option>
+                <option>Author Z-A</option>
+                <option>Title A-Z</option>
+                <option>Title Z-A</option>
+                <option>Stars ↑</option>
+                <option>Stars ↓</option>
+                <option>Votes ↑</option>
+                <option>Votes ↓</option>
             </select>
             <div class="divider"></div>
             <select class="btn flat" id="filter-btn" title="Filter entries">Filter
                 <optgroup label="Standard categories">
-                    <option>Option 1</option>
-                    <option>Option 2</option>
+                    <option>Dataset</option>
+                    <option>API</option>
                 </optgroup>
-                <optgroup label="User categories">
-                    <option>user category</option>
+                <optgroup label="Other categories">
+                    <option>a category</option>
                 </optgroup>
             </select>
             <input type="search" name="query" id="search-entry" placeholder="Search">
@@ -37,7 +44,7 @@
             <h2>Recently added:</h2>
             <ul class="datasets">
                 <!-- Preliminary content to be replaced by data from our server: -->
-                <li class="dataset">
+                <li class="dataset" data-id="">
                     <div class="dataset-info">
                         <div class="icon standup"></div>
                         <div class="details">